/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

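// Maps a GrPixelConfig to the VkFormat Skia uses to back it. Writes the format through
// 'format' when it is non-null and returns false for configs with no Vulkan backing format.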
bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
    VkFormat dontCare;
    if (!format) {
        format = &dontCare;
    }

    switch (config) {
        case kUnknown_GrPixelConfig:
            return false;
        case kRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_UNORM;
            return true;
        case kBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_UNORM;
            return true;
        case kSRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SRGB;
            return true;
        case kSBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_SRGB;
            return true;
        case kRGBA_8888_sint_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SINT;
            return true;
        case kRGB_565_GrPixelConfig:
            *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
            return true;
        case kRGBA_4444_GrPixelConfig:
            // R4G4B4A4 is not required to be supported, so we actually store the data
            // as if it were B4G4R4A4 and swizzle in shaders.
            *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
            return true;
        case kAlpha_8_GrPixelConfig: // fall through
        case kAlpha_8_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kAlpha_8_as_Alpha_GrPixelConfig:
            return false;
        case kGray_8_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kRGBA_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32B32A32_SFLOAT;
            return true;
        case kRG_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32_SFLOAT;
            return true;
        case kRGBA_half_GrPixelConfig:
            *format = VK_FORMAT_R16G16B16A16_SFLOAT;
            return true;
        case kAlpha_half_GrPixelConfig: // fall through
        case kAlpha_half_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R16_SFLOAT;
            return true;
    }
    SK_ABORT("Unexpected config");
    return false;
}

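// Inverse of GrPixelConfigToVkFormat: maps a VkFormat back to the GrPixelConfig it backs,
// or kUnknown_GrPixelConfig if the format is not one Skia uses.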
GrPixelConfig GrVkFormatToPixelConfig(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_SRGB:
            return kSRGBA_8888_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_SRGB:
            return kSBGRA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_SINT:
            return kRGBA_8888_sint_GrPixelConfig;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            return kRGB_565_GrPixelConfig;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            return kRGBA_4444_GrPixelConfig;
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_GrPixelConfig;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            return kRGBA_float_GrPixelConfig;
        case VK_FORMAT_R32G32_SFLOAT:
            return kRG_float_GrPixelConfig;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            return kRGBA_half_GrPixelConfig;
        case VK_FORMAT_R16_SFLOAT:
            return kAlpha_half_GrPixelConfig;
        default:
            return kUnknown_GrPixelConfig;
    }
}

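// Returns true if 'format' is an sRGB format. When 'linearFormat' is non-null it receives the
// corresponding linear (UNORM) format, or 'format' itself if there is no sRGB counterpart.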
bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
    VkFormat linearFmt = format;
    switch (format) {
        case VK_FORMAT_R8_SRGB:
            linearFmt = VK_FORMAT_R8_UNORM;
            break;
        case VK_FORMAT_R8G8_SRGB:
            linearFmt = VK_FORMAT_R8G8_UNORM;
            break;
        case VK_FORMAT_R8G8B8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8_UNORM;
            break;
        case VK_FORMAT_B8G8R8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8_UNORM;
            break;
        case VK_FORMAT_R8G8B8A8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
            break;
        case VK_FORMAT_B8G8R8A8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
            break;
        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
            linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
            break;
        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC2_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC3_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC7_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
            break;
        default:
            break;
    }
    if (linearFormat) {
        *linearFormat = linearFmt;
    }
    return (linearFmt != format);
}

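// Converts a raw sample count to the matching VkSampleCountFlagBits value; returns false for
// unsupported counts. A count of 0 is treated the same as 1 (no multisampling).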
bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
    switch (samples) {
        case 0: // fall through
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        case 32:
            *vkSamples = VK_SAMPLE_COUNT_32_BIT;
            return true;
        case 64:
            *vkSamples = VK_SAMPLE_COUNT_64_BIT;
            return true;
        default:
            return false;
    }
}

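// Maps a Vulkan shader stage to the SkSL program kind expected by the SkSL compiler.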
SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
    if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
        return SkSL::Program::kVertex_Kind;
    }
    if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
        return SkSL::Program::kGeometry_Kind;
    }
    SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
    return SkSL::Program::kFragment_Kind;
}

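// Inverse mapping: SkSL program kind back to the corresponding Vulkan shader stage.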
VkShaderStageFlagBits skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind) {
    if (SkSL::Program::kVertex_Kind == kind) {
        return VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (SkSL::Program::kGeometry_Kind == kind) {
        return VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    SkASSERT(SkSL::Program::kFragment_Kind == kind);
    return VK_SHADER_STAGE_FRAGMENT_BIT;
}

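// Compiles SkSL source for the given stage to SPIR-V, creates a VkShaderModule from it, and
// fills out 'stageInfo' for use in pipeline creation. Returns false on compile or create failure.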
bool GrCompileVkShaderModule(const GrVkGpu* gpu,
                             const char* shaderString,
                             VkShaderStageFlagBits stage,
                             VkShaderModule* shaderModule,
                             VkPipelineShaderStageCreateInfo* stageInfo,
                             const SkSL::Program::Settings& settings,
                             SkSL::Program::Inputs* outInputs) {
    std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
                                                             vk_shader_stage_to_skiasl_kind(stage),
                                                             SkSL::String(shaderString),
                                                             settings);
    if (!program) {
        SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
        SkASSERT(false);
        // Bail out here; SkASSERT is a no-op in release builds and 'program' must not be
        // dereferenced when compilation failed.
        return false;
    }
    *outInputs = program->fInputs;
    SkSL::String code;
    if (!gpu->shaderCompiler()->toSPIRV(*program, &code)) {
        SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
        return false;
    }

    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = code.size();
    moduleCreateInfo.pCode = (const uint32_t*)code.c_str();

    VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
                                                                     &moduleCreateInfo,
                                                                     nullptr,
                                                                     shaderModule));
    if (err) {
        return false;
    }

    memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
    stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stageInfo->pNext = nullptr;
    stageInfo->flags = 0;
    stageInfo->stage = skiasl_kind_to_vk_shader_stage(program->fKind);
    stageInfo->module = *shaderModule;
    stageInfo->pName = "main";
    stageInfo->pSpecializationInfo = nullptr;

    return true;
}
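
// Illustrative usage only (a minimal sketch, not part of the original file): a caller building a
// pipeline might invoke GrCompileVkShaderModule roughly as follows. 'gpu', 'vertSkSL', and
// 'settings' are assumed to exist in the caller.
//
//     VkShaderModule vertModule;
//     VkPipelineShaderStageCreateInfo vertStageInfo;
//     SkSL::Program::Inputs vertInputs;
//     if (!GrCompileVkShaderModule(gpu, vertSkSL.c_str(), VK_SHADER_STAGE_VERTEX_BIT,
//                                  &vertModule, &vertStageInfo, settings, &vertInputs)) {
//         return false;
//     }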