/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
    VkFormat dontCare;
    if (!format) {
        format = &dontCare;
    }

    switch (config) {
        case kUnknown_GrPixelConfig:
            return false;
        case kRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_UNORM;
            return true;
        case kBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_UNORM;
            return true;
        case kSRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SRGB;
            return true;
        case kSBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_SRGB;
            return true;
        case kRGBA_1010102_GrPixelConfig:
            *format = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
            return true;
        case kRGB_565_GrPixelConfig:
            *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
            return true;
        case kRGBA_4444_GrPixelConfig:
            // R4G4B4A4 is not required to be supported, so we actually
            // store the data as if it were B4G4R4A4 and swizzle in shaders.
            *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
            return true;
        case kAlpha_8_GrPixelConfig: // fall through
        case kAlpha_8_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kAlpha_8_as_Alpha_GrPixelConfig:
            return false;
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kGray_8_as_Lum_GrPixelConfig:
            return false;
        case kRGBA_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32B32A32_SFLOAT;
            return true;
        case kRG_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32_SFLOAT;
            return true;
        case kRGBA_half_GrPixelConfig:
            *format = VK_FORMAT_R16G16B16A16_SFLOAT;
            return true;
        case kAlpha_half_GrPixelConfig: // fall through
        case kAlpha_half_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R16_SFLOAT;
            return true;
    }
    SK_ABORT("Unexpected config");
    return false;
}

GrPixelConfig GrVkFormatToPixelConfig(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_SRGB:
            return kSRGBA_8888_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_SRGB:
            return kSBGRA_8888_GrPixelConfig;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            return kRGB_565_GrPixelConfig;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            return kRGBA_4444_GrPixelConfig;
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_GrPixelConfig;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            return kRGBA_float_GrPixelConfig;
        case VK_FORMAT_R32G32_SFLOAT:
            return kRG_float_GrPixelConfig;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            return kRGBA_half_GrPixelConfig;
        case VK_FORMAT_R16_SFLOAT:
            return kAlpha_half_GrPixelConfig;
        default:
            return kUnknown_GrPixelConfig;
    }
}

bool GrVkFormatPixelConfigPairIsValid(VkFormat format, GrPixelConfig config) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig == config;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig == config;
        case VK_FORMAT_R8G8B8A8_SRGB:
            return kSRGBA_8888_GrPixelConfig == config;
        case VK_FORMAT_B8G8R8A8_SRGB:
            return kSBGRA_8888_GrPixelConfig == config;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig == config;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            return kRGB_565_GrPixelConfig == config;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            return kRGBA_4444_GrPixelConfig == config;
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_GrPixelConfig == config ||
                   kAlpha_8_as_Red_GrPixelConfig == config ||
                   kGray_8_GrPixelConfig == config ||
                   kGray_8_as_Red_GrPixelConfig == config;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            return kRGBA_float_GrPixelConfig == config;
        case VK_FORMAT_R32G32_SFLOAT:
            return kRG_float_GrPixelConfig == config;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            return kRGBA_half_GrPixelConfig == config;
        case VK_FORMAT_R16_SFLOAT:
            return kAlpha_half_GrPixelConfig == config ||
                   kAlpha_half_as_Red_GrPixelConfig == config;
        default:
            return false;
    }
}

bool GrVkFormatIsSupported(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_B8G8R8A8_SRGB:
        case VK_FORMAT_R8G8B8A8_SINT:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_R32G32B32A32_SFLOAT:
        case VK_FORMAT_R32G32_SFLOAT:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
            return true;
        default:
            return false;
    }
}

bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
    VkFormat linearFmt = format;
    switch (format) {
        case VK_FORMAT_R8_SRGB:
            linearFmt = VK_FORMAT_R8_UNORM;
            break;
        case VK_FORMAT_R8G8_SRGB:
            linearFmt = VK_FORMAT_R8G8_UNORM;
            break;
        case VK_FORMAT_R8G8B8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8_UNORM;
            break;
        case VK_FORMAT_B8G8R8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8_UNORM;
            break;
        case VK_FORMAT_R8G8B8A8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
            break;
        case VK_FORMAT_B8G8R8A8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
            break;
        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
            linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
            break;
        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC2_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC3_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC7_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
            break;
        default:
            break;
    }
    if (linearFormat) {
        *linearFormat = linearFmt;
    }
    return (linearFmt != format);
}

bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        case 32:
            *vkSamples = VK_SAMPLE_COUNT_32_BIT;
            return true;
        case 64:
            *vkSamples = VK_SAMPLE_COUNT_64_BIT;
            return true;
        default:
            return false;
    }
}

SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
    if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
        return SkSL::Program::kVertex_Kind;
    }
    if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
        return SkSL::Program::kGeometry_Kind;
    }
    SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
    return SkSL::Program::kFragment_Kind;
}

VkShaderStageFlagBits skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind) {
    if (SkSL::Program::kVertex_Kind == kind) {
        return VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (SkSL::Program::kGeometry_Kind == kind) {
        return VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    SkASSERT(SkSL::Program::kFragment_Kind == kind);
    return VK_SHADER_STAGE_FRAGMENT_BIT;
}

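// Compiles the given SkSL source for the requested stage to SPIR-V using the gpu's shader
// compiler, creates a VkShaderModule from the result, and fills out stageInfo for use in
// pipeline creation. Returns false if compilation or module creation fails.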
bool GrCompileVkShaderModule(const GrVkGpu* gpu,
                             const char* shaderString,
                             VkShaderStageFlagBits stage,
                             VkShaderModule* shaderModule,
                             VkPipelineShaderStageCreateInfo* stageInfo,
                             const SkSL::Program::Settings& settings,
                             SkSL::Program::Inputs* outInputs) {
    std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
                                                         vk_shader_stage_to_skiasl_kind(stage),
                                                         SkSL::String(shaderString),
                                                         settings);
    if (!program) {
        SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
        SkASSERT(false);
        return false;
    }
    *outInputs = program->fInputs;
    SkSL::String code;
    if (!gpu->shaderCompiler()->toSPIRV(*program, &code)) {
        SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
        return false;
    }

    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = code.size();
    moduleCreateInfo.pCode = (const uint32_t*)code.c_str();

    VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
                                                                     &moduleCreateInfo,
                                                                     nullptr,
                                                                     shaderModule));
    if (err) {
        return false;
    }

    memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
    stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stageInfo->pNext = nullptr;
    stageInfo->flags = 0;
    stageInfo->stage = skiasl_kind_to_vk_shader_stage(program->fKind);
    stageInfo->module = *shaderModule;
    stageInfo->pName = "main";
    stageInfo->pSpecializationInfo = nullptr;

    return true;
}