/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

| 8 | #include "GrVkUtil.h" |
| 9 | |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 10 | #include "vk/GrVkGpu.h" |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 11 | #include "SkSLCompiler.h" |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 12 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 13 | bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) { |
| 14 | VkFormat dontCare; |
| 15 | if (!format) { |
| 16 | format = &dontCare; |
| 17 | } |
| 18 | |
| 19 | switch (config) { |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 20 | case kUnknown_GrPixelConfig: |
| 21 | return false; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 22 | case kRGBA_8888_GrPixelConfig: |
| 23 | *format = VK_FORMAT_R8G8B8A8_UNORM; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 24 | return true; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 25 | case kBGRA_8888_GrPixelConfig: |
| 26 | *format = VK_FORMAT_B8G8R8A8_UNORM; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 27 | return true; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 28 | case kSRGBA_8888_GrPixelConfig: |
| 29 | *format = VK_FORMAT_R8G8B8A8_SRGB; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 30 | return true; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 31 | case kSBGRA_8888_GrPixelConfig: |
| 32 | *format = VK_FORMAT_B8G8R8A8_SRGB; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 33 | return true; |
| 34 | case kRGBA_8888_sint_GrPixelConfig: |
| 35 | *format = VK_FORMAT_R8G8B8A8_SINT; |
| 36 | return true; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 37 | case kRGB_565_GrPixelConfig: |
| 38 | *format = VK_FORMAT_R5G6B5_UNORM_PACK16; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 39 | return true; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 40 | case kRGBA_4444_GrPixelConfig: |
egdaniel | 3fe0327 | 2016-08-15 10:59:17 -0700 | [diff] [blame] | 41 | // R4G4B4A4 is not required to be supported so we actually |
| 42 | // store the data is if it was B4G4R4A4 and swizzle in shaders |
| 43 | *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 44 | return true; |
Greg Daniel | ef59d87 | 2017-11-17 16:47:21 -0500 | [diff] [blame] | 45 | case kAlpha_8_GrPixelConfig: // fall through |
| 46 | case kAlpha_8_as_Red_GrPixelConfig: |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 47 | *format = VK_FORMAT_R8_UNORM; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 48 | return true; |
Greg Daniel | ef59d87 | 2017-11-17 16:47:21 -0500 | [diff] [blame] | 49 | case kAlpha_8_as_Alpha_GrPixelConfig: |
| 50 | return false; |
Brian Osman | 986563b | 2017-01-10 14:20:02 -0500 | [diff] [blame] | 51 | case kGray_8_GrPixelConfig: |
Greg Daniel | 7af060a | 2017-12-05 16:27:11 -0500 | [diff] [blame] | 52 | case kGray_8_as_Red_GrPixelConfig: |
Brian Osman | 986563b | 2017-01-10 14:20:02 -0500 | [diff] [blame] | 53 | *format = VK_FORMAT_R8_UNORM; |
| 54 | return true; |
Greg Daniel | 7af060a | 2017-12-05 16:27:11 -0500 | [diff] [blame] | 55 | case kGray_8_as_Lum_GrPixelConfig: |
| 56 | return false; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 57 | case kRGBA_float_GrPixelConfig: |
| 58 | *format = VK_FORMAT_R32G32B32A32_SFLOAT; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 59 | return true; |
csmartdalton | 6aa0e11 | 2017-02-08 16:14:11 -0500 | [diff] [blame] | 60 | case kRG_float_GrPixelConfig: |
| 61 | *format = VK_FORMAT_R32G32_SFLOAT; |
| 62 | return true; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 63 | case kRGBA_half_GrPixelConfig: |
| 64 | *format = VK_FORMAT_R16G16B16A16_SFLOAT; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 65 | return true; |
Greg Daniel | ef59d87 | 2017-11-17 16:47:21 -0500 | [diff] [blame] | 66 | case kAlpha_half_GrPixelConfig: // fall through |
| 67 | case kAlpha_half_as_Red_GrPixelConfig: |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 68 | *format = VK_FORMAT_R16_SFLOAT; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 69 | return true; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 70 | } |
Ben Wagner | b4aab9a | 2017-08-16 10:53:04 -0400 | [diff] [blame] | 71 | SK_ABORT("Unexpected config"); |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 72 | return false; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 73 | } |
| 74 | |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 75 | GrPixelConfig GrVkFormatToPixelConfig(VkFormat format) { |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 76 | switch (format) { |
| 77 | case VK_FORMAT_R8G8B8A8_UNORM: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 78 | return kRGBA_8888_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 79 | case VK_FORMAT_B8G8R8A8_UNORM: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 80 | return kBGRA_8888_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 81 | case VK_FORMAT_R8G8B8A8_SRGB: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 82 | return kSRGBA_8888_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 83 | case VK_FORMAT_B8G8R8A8_SRGB: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 84 | return kSBGRA_8888_GrPixelConfig; |
Brian Salomon | bf7b620 | 2016-11-11 16:08:03 -0500 | [diff] [blame] | 85 | case VK_FORMAT_R8G8B8A8_SINT: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 86 | return kRGBA_8888_sint_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 87 | case VK_FORMAT_R5G6B5_UNORM_PACK16: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 88 | return kRGB_565_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 89 | break; |
egdaniel | 3fe0327 | 2016-08-15 10:59:17 -0700 | [diff] [blame] | 90 | case VK_FORMAT_B4G4R4A4_UNORM_PACK16: |
| 91 | // R4G4B4A4 is not required to be supported so we actually |
| 92 | // store RGBA_4444 data as B4G4R4A4. |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 93 | return kRGBA_4444_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 94 | case VK_FORMAT_R8_UNORM: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 95 | return kAlpha_8_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 96 | case VK_FORMAT_R32G32B32A32_SFLOAT: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 97 | return kRGBA_float_GrPixelConfig; |
csmartdalton | 6aa0e11 | 2017-02-08 16:14:11 -0500 | [diff] [blame] | 98 | case VK_FORMAT_R32G32_SFLOAT: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 99 | return kRG_float_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 100 | case VK_FORMAT_R16G16B16A16_SFLOAT: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 101 | return kRGBA_half_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 102 | case VK_FORMAT_R16_SFLOAT: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 103 | return kAlpha_half_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 104 | default: |
Greg Daniel | 9440345 | 2017-04-18 15:52:36 -0400 | [diff] [blame] | 105 | return kUnknown_GrPixelConfig; |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 106 | } |
jvanverth | 9f37246 | 2016-04-06 06:08:59 -0700 | [diff] [blame] | 107 | } |
| 108 | |
Greg Daniel | 81b8059 | 2017-12-13 10:20:04 -0500 | [diff] [blame^] | 109 | bool GrVkFormatPixelConfigPairIsValid(VkFormat format, GrPixelConfig config) { |
| 110 | switch (format) { |
| 111 | case VK_FORMAT_R8G8B8A8_UNORM: |
| 112 | return kRGBA_8888_GrPixelConfig == config; |
| 113 | case VK_FORMAT_B8G8R8A8_UNORM: |
| 114 | return kBGRA_8888_GrPixelConfig == config; |
| 115 | case VK_FORMAT_R8G8B8A8_SRGB: |
| 116 | return kSRGBA_8888_GrPixelConfig == config; |
| 117 | case VK_FORMAT_B8G8R8A8_SRGB: |
| 118 | return kSBGRA_8888_GrPixelConfig == config; |
| 119 | case VK_FORMAT_R8G8B8A8_SINT: |
| 120 | return kRGBA_8888_sint_GrPixelConfig == config; |
| 121 | case VK_FORMAT_R5G6B5_UNORM_PACK16: |
| 122 | return kRGB_565_GrPixelConfig == config; |
| 123 | case VK_FORMAT_B4G4R4A4_UNORM_PACK16: |
| 124 | // R4G4B4A4 is not required to be supported so we actually |
| 125 | // store RGBA_4444 data as B4G4R4A4. |
| 126 | return kRGBA_4444_GrPixelConfig == config; |
| 127 | case VK_FORMAT_R8_UNORM: |
| 128 | return kAlpha_8_GrPixelConfig == config || |
| 129 | kAlpha_8_as_Red_GrPixelConfig == config || |
| 130 | kGray_8_GrPixelConfig == config || |
| 131 | kGray_8_as_Red_GrPixelConfig == config; |
| 132 | case VK_FORMAT_R32G32B32A32_SFLOAT: |
| 133 | return kRGBA_float_GrPixelConfig == config; |
| 134 | case VK_FORMAT_R32G32_SFLOAT: |
| 135 | return kRG_float_GrPixelConfig == config; |
| 136 | case VK_FORMAT_R16G16B16A16_SFLOAT: |
| 137 | return kRGBA_half_GrPixelConfig == config; |
| 138 | case VK_FORMAT_R16_SFLOAT: |
| 139 | return kAlpha_half_GrPixelConfig == config || |
| 140 | kAlpha_half_as_Red_GrPixelConfig == config; |
| 141 | default: |
| 142 | return false; |
| 143 | } |
| 144 | } |
| 145 | |
| 146 | bool GrVkFormatIsSupported(VkFormat format) { |
| 147 | switch (format) { |
| 148 | case VK_FORMAT_R8G8B8A8_UNORM: |
| 149 | case VK_FORMAT_B8G8R8A8_UNORM: |
| 150 | case VK_FORMAT_R8G8B8A8_SRGB: |
| 151 | case VK_FORMAT_B8G8R8A8_SRGB: |
| 152 | case VK_FORMAT_R8G8B8A8_SINT: |
| 153 | case VK_FORMAT_R5G6B5_UNORM_PACK16: |
| 154 | case VK_FORMAT_B4G4R4A4_UNORM_PACK16: |
| 155 | case VK_FORMAT_R8_UNORM: |
| 156 | case VK_FORMAT_R32G32B32A32_SFLOAT: |
| 157 | case VK_FORMAT_R32G32_SFLOAT: |
| 158 | case VK_FORMAT_R16G16B16A16_SFLOAT: |
| 159 | case VK_FORMAT_R16_SFLOAT: |
| 160 | return true; |
| 161 | default: |
| 162 | return false; |
| 163 | } |
| 164 | } |
| 165 | |
brianosman | f05ab1b | 2016-05-12 11:01:10 -0700 | [diff] [blame] | 166 | bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) { |
| 167 | VkFormat linearFmt = format; |
| 168 | switch (format) { |
| 169 | case VK_FORMAT_R8_SRGB: |
| 170 | linearFmt = VK_FORMAT_R8_UNORM; |
| 171 | break; |
| 172 | case VK_FORMAT_R8G8_SRGB: |
| 173 | linearFmt = VK_FORMAT_R8G8_UNORM; |
| 174 | break; |
| 175 | case VK_FORMAT_R8G8B8_SRGB: |
| 176 | linearFmt = VK_FORMAT_R8G8B8_UNORM; |
| 177 | break; |
| 178 | case VK_FORMAT_B8G8R8_SRGB: |
| 179 | linearFmt = VK_FORMAT_B8G8R8_UNORM; |
| 180 | break; |
| 181 | case VK_FORMAT_R8G8B8A8_SRGB: |
| 182 | linearFmt = VK_FORMAT_R8G8B8A8_UNORM; |
| 183 | break; |
| 184 | case VK_FORMAT_B8G8R8A8_SRGB: |
| 185 | linearFmt = VK_FORMAT_B8G8R8A8_UNORM; |
| 186 | break; |
| 187 | case VK_FORMAT_A8B8G8R8_SRGB_PACK32: |
| 188 | linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32; |
| 189 | break; |
| 190 | case VK_FORMAT_BC1_RGB_SRGB_BLOCK: |
| 191 | linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK; |
| 192 | break; |
| 193 | case VK_FORMAT_BC1_RGBA_SRGB_BLOCK: |
| 194 | linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK; |
| 195 | break; |
| 196 | case VK_FORMAT_BC2_SRGB_BLOCK: |
| 197 | linearFmt = VK_FORMAT_BC2_UNORM_BLOCK; |
| 198 | break; |
| 199 | case VK_FORMAT_BC3_SRGB_BLOCK: |
| 200 | linearFmt = VK_FORMAT_BC3_UNORM_BLOCK; |
| 201 | break; |
| 202 | case VK_FORMAT_BC7_SRGB_BLOCK: |
| 203 | linearFmt = VK_FORMAT_BC7_UNORM_BLOCK; |
| 204 | break; |
| 205 | case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: |
| 206 | linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; |
| 207 | break; |
| 208 | case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: |
| 209 | linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK; |
| 210 | break; |
| 211 | case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: |
| 212 | linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK; |
| 213 | break; |
| 214 | case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: |
| 215 | linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK; |
| 216 | break; |
| 217 | case VK_FORMAT_ASTC_5x4_SRGB_BLOCK: |
| 218 | linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK; |
| 219 | break; |
| 220 | case VK_FORMAT_ASTC_5x5_SRGB_BLOCK: |
| 221 | linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK; |
| 222 | break; |
| 223 | case VK_FORMAT_ASTC_6x5_SRGB_BLOCK: |
| 224 | linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK; |
| 225 | break; |
| 226 | case VK_FORMAT_ASTC_6x6_SRGB_BLOCK: |
| 227 | linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK; |
| 228 | break; |
| 229 | case VK_FORMAT_ASTC_8x5_SRGB_BLOCK: |
| 230 | linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK; |
| 231 | break; |
| 232 | case VK_FORMAT_ASTC_8x6_SRGB_BLOCK: |
| 233 | linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK; |
| 234 | break; |
| 235 | case VK_FORMAT_ASTC_8x8_SRGB_BLOCK: |
| 236 | linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK; |
| 237 | break; |
| 238 | case VK_FORMAT_ASTC_10x5_SRGB_BLOCK: |
| 239 | linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK; |
| 240 | break; |
| 241 | case VK_FORMAT_ASTC_10x6_SRGB_BLOCK: |
| 242 | linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK; |
| 243 | break; |
| 244 | case VK_FORMAT_ASTC_10x8_SRGB_BLOCK: |
| 245 | linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK; |
| 246 | break; |
| 247 | case VK_FORMAT_ASTC_10x10_SRGB_BLOCK: |
| 248 | linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK; |
| 249 | break; |
| 250 | case VK_FORMAT_ASTC_12x10_SRGB_BLOCK: |
| 251 | linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK; |
| 252 | break; |
| 253 | case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: |
| 254 | linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK; |
| 255 | break; |
| 256 | default: |
| 257 | break; |
| 258 | } |
| 259 | if (linearFormat) { |
| 260 | *linearFormat = linearFmt; |
| 261 | } |
| 262 | return (linearFmt != format); |
| 263 | } |
| 264 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 265 | bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) { |
| 266 | switch (samples) { |
| 267 | case 0: // fall through |
| 268 | case 1: |
| 269 | *vkSamples = VK_SAMPLE_COUNT_1_BIT; |
| 270 | return true; |
| 271 | case 2: |
| 272 | *vkSamples = VK_SAMPLE_COUNT_2_BIT; |
| 273 | return true; |
| 274 | case 4: |
egdaniel | bf63e61 | 2016-08-17 06:26:16 -0700 | [diff] [blame] | 275 | *vkSamples = VK_SAMPLE_COUNT_4_BIT; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 276 | return true; |
| 277 | case 8: |
egdaniel | bf63e61 | 2016-08-17 06:26:16 -0700 | [diff] [blame] | 278 | *vkSamples = VK_SAMPLE_COUNT_8_BIT; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 279 | return true; |
| 280 | case 16: |
egdaniel | bf63e61 | 2016-08-17 06:26:16 -0700 | [diff] [blame] | 281 | *vkSamples = VK_SAMPLE_COUNT_16_BIT; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 282 | return true; |
| 283 | case 32: |
egdaniel | bf63e61 | 2016-08-17 06:26:16 -0700 | [diff] [blame] | 284 | *vkSamples = VK_SAMPLE_COUNT_32_BIT; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 285 | return true; |
| 286 | case 64: |
egdaniel | bf63e61 | 2016-08-17 06:26:16 -0700 | [diff] [blame] | 287 | *vkSamples = VK_SAMPLE_COUNT_64_BIT; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 288 | return true; |
| 289 | default: |
| 290 | return false; |
| 291 | } |
| 292 | } |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 293 | |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 294 | SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) { |
| 295 | if (VK_SHADER_STAGE_VERTEX_BIT == stage) { |
| 296 | return SkSL::Program::kVertex_Kind; |
| 297 | } |
Chris Dalton | 33607c6 | 2017-07-07 11:00:48 -0600 | [diff] [blame] | 298 | if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) { |
| 299 | return SkSL::Program::kGeometry_Kind; |
| 300 | } |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 301 | SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage); |
| 302 | return SkSL::Program::kFragment_Kind; |
| 303 | } |
Ethan Nicholas | 941e7e2 | 2016-12-12 15:33:30 -0500 | [diff] [blame] | 304 | |
| 305 | VkShaderStageFlagBits skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind) { |
| 306 | if (SkSL::Program::kVertex_Kind == kind) { |
| 307 | return VK_SHADER_STAGE_VERTEX_BIT; |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 308 | } |
Chris Dalton | 33607c6 | 2017-07-07 11:00:48 -0600 | [diff] [blame] | 309 | if (SkSL::Program::kGeometry_Kind == kind) { |
| 310 | return VK_SHADER_STAGE_GEOMETRY_BIT; |
| 311 | } |
Ethan Nicholas | 941e7e2 | 2016-12-12 15:33:30 -0500 | [diff] [blame] | 312 | SkASSERT(SkSL::Program::kFragment_Kind == kind); |
| 313 | return VK_SHADER_STAGE_FRAGMENT_BIT; |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 314 | } |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 315 | |
| 316 | bool GrCompileVkShaderModule(const GrVkGpu* gpu, |
| 317 | const char* shaderString, |
| 318 | VkShaderStageFlagBits stage, |
| 319 | VkShaderModule* shaderModule, |
Ethan Nicholas | 941e7e2 | 2016-12-12 15:33:30 -0500 | [diff] [blame] | 320 | VkPipelineShaderStageCreateInfo* stageInfo, |
| 321 | const SkSL::Program::Settings& settings, |
| 322 | SkSL::Program::Inputs* outInputs) { |
| 323 | std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram( |
| 324 | vk_shader_stage_to_skiasl_kind(stage), |
Brian Osman | 93ba0a4 | 2017-08-14 14:48:10 -0400 | [diff] [blame] | 325 | SkSL::String(shaderString), |
Ethan Nicholas | 941e7e2 | 2016-12-12 15:33:30 -0500 | [diff] [blame] | 326 | settings); |
| 327 | if (!program) { |
| 328 | SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str()); |
| 329 | SkASSERT(false); |
| 330 | } |
| 331 | *outInputs = program->fInputs; |
Ethan Nicholas | 0df1b04 | 2017-03-31 13:56:23 -0400 | [diff] [blame] | 332 | SkSL::String code; |
Ethan Nicholas | 941e7e2 | 2016-12-12 15:33:30 -0500 | [diff] [blame] | 333 | if (!gpu->shaderCompiler()->toSPIRV(*program, &code)) { |
| 334 | SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str()); |
| 335 | return false; |
| 336 | } |
| 337 | |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 338 | VkShaderModuleCreateInfo moduleCreateInfo; |
| 339 | memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo)); |
| 340 | moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; |
| 341 | moduleCreateInfo.pNext = nullptr; |
| 342 | moduleCreateInfo.flags = 0; |
Ethan Nicholas | 941e7e2 | 2016-12-12 15:33:30 -0500 | [diff] [blame] | 343 | moduleCreateInfo.codeSize = code.size(); |
| 344 | moduleCreateInfo.pCode = (const uint32_t*)code.c_str(); |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 345 | |
| 346 | VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(), |
| 347 | &moduleCreateInfo, |
| 348 | nullptr, |
| 349 | shaderModule)); |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 350 | if (err) { |
| 351 | return false; |
| 352 | } |
| 353 | |
| 354 | memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo)); |
| 355 | stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; |
| 356 | stageInfo->pNext = nullptr; |
| 357 | stageInfo->flags = 0; |
Ethan Nicholas | 941e7e2 | 2016-12-12 15:33:30 -0500 | [diff] [blame] | 358 | stageInfo->stage = skiasl_kind_to_vk_shader_stage(program->fKind); |
egdaniel | 88987d8 | 2016-09-19 10:17:34 -0700 | [diff] [blame] | 359 | stageInfo->module = *shaderModule; |
| 360 | stageInfo->pName = "main"; |
| 361 | stageInfo->pSpecializationInfo = nullptr; |
| 362 | |
| 363 | return true; |
| 364 | } |