/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#if USE_SKSL
#include "SkSLCompiler.h"
#endif

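// Maps a GrPixelConfig to the VkFormat Skia uses to back it on Vulkan. Returns false for
// configs with no Vulkan support. Passing a null format pointer only queries whether a
// mapping exists.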
bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
    VkFormat dontCare;
    if (!format) {
        format = &dontCare;
    }

    switch (config) {
        case kRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_UNORM;
            break;
        case kBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_UNORM;
            break;
        case kSRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SRGB;
            break;
        case kSBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_SRGB;
            break;
        case kRGB_565_GrPixelConfig:
            *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
            break;
        case kRGBA_4444_GrPixelConfig:
            // R4G4B4A4 is not required to be supported, so we actually store the data
            // as if it were B4G4R4A4 and swizzle in the shaders.
            *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
            break;
        case kIndex_8_GrPixelConfig:
            // No current Vulkan support for this config.
            return false;
        case kAlpha_8_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            break;
        case kETC1_GrPixelConfig:
            // Convert to ETC2, which is a superset of ETC1.
            *format = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            break;
        case kLATC_GrPixelConfig:
            // No current Vulkan support for this config.
            return false;
        case kR11_EAC_GrPixelConfig:
            *format = VK_FORMAT_EAC_R11_UNORM_BLOCK;
            break;
        case kASTC_12x12_GrPixelConfig:
            *format = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
            break;
        case kRGBA_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32B32A32_SFLOAT;
            break;
        case kRGBA_half_GrPixelConfig:
            *format = VK_FORMAT_R16G16B16A16_SFLOAT;
            break;
        case kAlpha_half_GrPixelConfig:
            *format = VK_FORMAT_R16_SFLOAT;
            break;
        default:
            return false;
    }
    return true;
}

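// Inverse of GrPixelConfigToVkFormat: maps the VkFormats used above back to the GrPixelConfig
// Skia treats them as. Returns false for formats Skia does not use. A null config pointer only
// queries whether a mapping exists.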
bool GrVkFormatToPixelConfig(VkFormat format, GrPixelConfig* config) {
    GrPixelConfig dontCare;
    if (!config) {
        config = &dontCare;
    }

    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            *config = kRGBA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_B8G8R8A8_UNORM:
            *config = kBGRA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_R8G8B8A8_SRGB:
            *config = kSRGBA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_B8G8R8A8_SRGB:
            *config = kSBGRA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            *config = kRGB_565_GrPixelConfig;
            break;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            *config = kRGBA_4444_GrPixelConfig;
            break;
        case VK_FORMAT_R8_UNORM:
            *config = kAlpha_8_GrPixelConfig;
            break;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
            *config = kETC1_GrPixelConfig;
            break;
        case VK_FORMAT_EAC_R11_UNORM_BLOCK:
            *config = kR11_EAC_GrPixelConfig;
            break;
        case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
            *config = kASTC_12x12_GrPixelConfig;
            break;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            *config = kRGBA_float_GrPixelConfig;
            break;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            *config = kRGBA_half_GrPixelConfig;
            break;
        case VK_FORMAT_R16_SFLOAT:
            *config = kAlpha_half_GrPixelConfig;
            break;
        default:
            return false;
    }
    return true;
}

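// Returns true if the given VkFormat is an sRGB-encoded format. If linearFormat is non-null, it
// is set to the corresponding linear (UNORM) format, or to the input format when there is no
// sRGB counterpart.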
bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
    VkFormat linearFmt = format;
    switch (format) {
        case VK_FORMAT_R8_SRGB:
            linearFmt = VK_FORMAT_R8_UNORM;
            break;
        case VK_FORMAT_R8G8_SRGB:
            linearFmt = VK_FORMAT_R8G8_UNORM;
            break;
        case VK_FORMAT_R8G8B8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8_UNORM;
            break;
        case VK_FORMAT_B8G8R8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8_UNORM;
            break;
        case VK_FORMAT_R8G8B8A8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
            break;
        case VK_FORMAT_B8G8R8A8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
            break;
        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
            linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
            break;
        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC2_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC3_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC7_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
            break;
        default:
            break;
    }
    if (linearFormat) {
        *linearFormat = linearFmt;
    }
    return (linearFmt != format);
}

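// Converts a Skia sample count to the matching VkSampleCountFlagBits value. Sample counts of 0
// and 1 both map to VK_SAMPLE_COUNT_1_BIT; unsupported counts return false.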
bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
    switch (samples) {
        case 0: // fall through
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        case 32:
            *vkSamples = VK_SAMPLE_COUNT_32_BIT;
            return true;
        case 64:
            *vkSamples = VK_SAMPLE_COUNT_64_BIT;
            return true;
        default:
            return false;
    }
}

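// Translates a Vulkan shader stage into the corresponding compiler input kind. Only the vertex
// and fragment stages are expected here.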
#if USE_SKSL
SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
    if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
        return SkSL::Program::kVertex_Kind;
    }
    SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
    return SkSL::Program::kFragment_Kind;
}
#else
shaderc_shader_kind vk_shader_stage_to_shaderc_kind(VkShaderStageFlagBits stage) {
    if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
        return shaderc_glsl_vertex_shader;
    }
    SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
    return shaderc_glsl_fragment_shader;
}
#endif

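// Compiles the given shader source into a VkShaderModule for the given stage and fills out the
// matching VkPipelineShaderStageCreateInfo. Depending on the build, the source is compiled to
// SPIR-V with SkSL or shaderc, or passed through as GLSL when the driver accepts it directly.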
bool GrCompileVkShaderModule(const GrVkGpu* gpu,
                             const char* shaderString,
                             VkShaderStageFlagBits stage,
                             VkShaderModule* shaderModule,
                             VkPipelineShaderStageCreateInfo* stageInfo) {
    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;

#if USE_SKSL
    std::string code;
#else
    shaderc_compilation_result_t result = nullptr;
#endif

    if (gpu->vkCaps().canUseGLSLForShaderModule()) {
        moduleCreateInfo.codeSize = strlen(shaderString);
        moduleCreateInfo.pCode = (const uint32_t*)shaderString;
    } else {

#if USE_SKSL
        bool result = gpu->shaderCompiler()->toSPIRV(vk_shader_stage_to_skiasl_kind(stage),
                                                     std::string(shaderString),
                                                     &code);
        if (!result) {
            SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
            return false;
        }
        moduleCreateInfo.codeSize = code.size();
        moduleCreateInfo.pCode = (const uint32_t*)code.c_str();
#else
        shaderc_compiler_t compiler = gpu->shadercCompiler();

        shaderc_compile_options_t options = shaderc_compile_options_initialize();

        shaderc_shader_kind shadercStage = vk_shader_stage_to_shaderc_kind(stage);
        result = shaderc_compile_into_spv(compiler,
                                          shaderString,
                                          strlen(shaderString),
                                          shadercStage,
                                          "shader",
                                          "main",
                                          options);
        shaderc_compile_options_release(options);
#ifdef SK_DEBUG
        if (shaderc_result_get_num_errors(result)) {
            SkDebugf("%s\n", shaderString);
            SkDebugf("%s\n", shaderc_result_get_error_message(result));
            return false;
        }
#endif // SK_DEBUG

        moduleCreateInfo.codeSize = shaderc_result_get_length(result);
        moduleCreateInfo.pCode = (const uint32_t*)shaderc_result_get_bytes(result);
#endif // USE_SKSL
    }

    VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
                                                                     &moduleCreateInfo,
                                                                     nullptr,
                                                                     shaderModule));

    if (!gpu->vkCaps().canUseGLSLForShaderModule()) {
#if !USE_SKSL
        shaderc_result_release(result);
#endif
    }
    if (err) {
        return false;
    }

    memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
    stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stageInfo->pNext = nullptr;
    stageInfo->flags = 0;
    stageInfo->stage = stage;
    stageInfo->module = *shaderModule;
    stageInfo->pName = "main";
    stageInfo->pSpecializationInfo = nullptr;

    return true;
}
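
// A minimal usage sketch, assuming a valid GrVkGpu* gpu and a shader source string vertSrc
// (both hypothetical names used only for illustration):
//
//     VkShaderModule vertModule;
//     VkPipelineShaderStageCreateInfo vertStageInfo;
//     if (!GrCompileVkShaderModule(gpu, vertSrc, VK_SHADER_STAGE_VERTEX_BIT,
//                                  &vertModule, &vertStageInfo)) {
//         // handle the compile failure
//     }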