blob: e0d5daf99dc001437f993bc47414ddd92138c404 [file] [log] [blame]
/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

8#include "GrVkUtil.h"
9
10bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
11 VkFormat dontCare;
12 if (!format) {
13 format = &dontCare;
14 }
15
16 switch (config) {
17 case kRGBA_8888_GrPixelConfig:
18 *format = VK_FORMAT_R8G8B8A8_UNORM;
19 break;
20 case kBGRA_8888_GrPixelConfig:
21 *format = VK_FORMAT_B8G8R8A8_UNORM;
22 break;
23 case kSRGBA_8888_GrPixelConfig:
24 *format = VK_FORMAT_R8G8B8A8_SRGB;
25 break;
jvanverth9f372462016-04-06 06:08:59 -070026 case kSBGRA_8888_GrPixelConfig:
27 *format = VK_FORMAT_B8G8R8A8_SRGB;
28 break;
Greg Daniel164a9f02016-02-22 09:56:40 -050029 case kRGB_565_GrPixelConfig:
30 *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
31 break;
32 case kRGBA_4444_GrPixelConfig:
egdaniel3fe03272016-08-15 10:59:17 -070033 // R4G4B4A4 is not required to be supported so we actually
34 // store the data is if it was B4G4R4A4 and swizzle in shaders
35 *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
Greg Daniel164a9f02016-02-22 09:56:40 -050036 break;
37 case kIndex_8_GrPixelConfig:
jvanverth9f372462016-04-06 06:08:59 -070038 // No current vulkan support for this config
Greg Daniel164a9f02016-02-22 09:56:40 -050039 return false;
40 case kAlpha_8_GrPixelConfig:
41 *format = VK_FORMAT_R8_UNORM;
42 break;
43 case kETC1_GrPixelConfig:
44 // converting to ETC2 which is a superset of ETC1
45 *format = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
46 break;
47 case kLATC_GrPixelConfig:
jvanverth9f372462016-04-06 06:08:59 -070048 // No current vulkan support for this config
Greg Daniel164a9f02016-02-22 09:56:40 -050049 return false;
50 case kR11_EAC_GrPixelConfig:
51 *format = VK_FORMAT_EAC_R11_UNORM_BLOCK;
52 break;
53 case kASTC_12x12_GrPixelConfig:
54 *format = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
55 break;
56 case kRGBA_float_GrPixelConfig:
57 *format = VK_FORMAT_R32G32B32A32_SFLOAT;
58 break;
59 case kRGBA_half_GrPixelConfig:
60 *format = VK_FORMAT_R16G16B16A16_SFLOAT;
61 break;
62 case kAlpha_half_GrPixelConfig:
63 *format = VK_FORMAT_R16_SFLOAT;
64 break;
65 default:
66 return false;
67 }
68 return true;
69}
70
jvanverth9f372462016-04-06 06:08:59 -070071bool GrVkFormatToPixelConfig(VkFormat format, GrPixelConfig* config) {
72 GrPixelConfig dontCare;
73 if (!config) {
74 config = &dontCare;
75 }
76
77 switch (format) {
78 case VK_FORMAT_R8G8B8A8_UNORM:
79 *config = kRGBA_8888_GrPixelConfig;
80 break;
81 case VK_FORMAT_B8G8R8A8_UNORM:
82 *config = kBGRA_8888_GrPixelConfig;
83 break;
84 case VK_FORMAT_R8G8B8A8_SRGB:
85 *config = kSRGBA_8888_GrPixelConfig;
86 break;
87 case VK_FORMAT_B8G8R8A8_SRGB:
88 *config = kSBGRA_8888_GrPixelConfig;
89 break;
90 case VK_FORMAT_R5G6B5_UNORM_PACK16:
91 *config = kRGB_565_GrPixelConfig;
92 break;
egdaniel3fe03272016-08-15 10:59:17 -070093 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
94 // R4G4B4A4 is not required to be supported so we actually
95 // store RGBA_4444 data as B4G4R4A4.
jvanverth9f372462016-04-06 06:08:59 -070096 *config = kRGBA_4444_GrPixelConfig;
97 break;
98 case VK_FORMAT_R8_UNORM:
99 *config = kAlpha_8_GrPixelConfig;
100 break;
101 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
102 *config = kETC1_GrPixelConfig;
103 break;
104 case VK_FORMAT_EAC_R11_UNORM_BLOCK:
105 *config = kR11_EAC_GrPixelConfig;
106 break;
107 case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
108 *config = kASTC_12x12_GrPixelConfig;
109 break;
110 case VK_FORMAT_R32G32B32A32_SFLOAT:
111 *config = kRGBA_float_GrPixelConfig;
112 break;
113 case VK_FORMAT_R16G16B16A16_SFLOAT:
114 *config = kRGBA_half_GrPixelConfig;
115 break;
116 case VK_FORMAT_R16_SFLOAT:
117 *config = kAlpha_half_GrPixelConfig;
118 break;
119 default:
120 return false;
121 }
122 return true;
123}
124
brianosmanf05ab1b2016-05-12 11:01:10 -0700125bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
126 VkFormat linearFmt = format;
127 switch (format) {
128 case VK_FORMAT_R8_SRGB:
129 linearFmt = VK_FORMAT_R8_UNORM;
130 break;
131 case VK_FORMAT_R8G8_SRGB:
132 linearFmt = VK_FORMAT_R8G8_UNORM;
133 break;
134 case VK_FORMAT_R8G8B8_SRGB:
135 linearFmt = VK_FORMAT_R8G8B8_UNORM;
136 break;
137 case VK_FORMAT_B8G8R8_SRGB:
138 linearFmt = VK_FORMAT_B8G8R8_UNORM;
139 break;
140 case VK_FORMAT_R8G8B8A8_SRGB:
141 linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
142 break;
143 case VK_FORMAT_B8G8R8A8_SRGB:
144 linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
145 break;
146 case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
147 linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
148 break;
149 case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
150 linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
151 break;
152 case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
153 linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
154 break;
155 case VK_FORMAT_BC2_SRGB_BLOCK:
156 linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
157 break;
158 case VK_FORMAT_BC3_SRGB_BLOCK:
159 linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
160 break;
161 case VK_FORMAT_BC7_SRGB_BLOCK:
162 linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
163 break;
164 case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
165 linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
166 break;
167 case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
168 linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
169 break;
170 case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
171 linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
172 break;
173 case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
174 linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
175 break;
176 case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
177 linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
178 break;
179 case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
180 linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
181 break;
182 case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
183 linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
184 break;
185 case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
186 linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
187 break;
188 case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
189 linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
190 break;
191 case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
192 linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
193 break;
194 case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
195 linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
196 break;
197 case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
198 linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
199 break;
200 case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
201 linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
202 break;
203 case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
204 linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
205 break;
206 case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
207 linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
208 break;
209 case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
210 linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
211 break;
212 case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
213 linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
214 break;
215 default:
216 break;
217 }
218 if (linearFormat) {
219 *linearFormat = linearFmt;
220 }
221 return (linearFmt != format);
222}
223
Greg Daniel164a9f02016-02-22 09:56:40 -0500224bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
225 switch (samples) {
226 case 0: // fall through
227 case 1:
228 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
229 return true;
230 case 2:
231 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
232 return true;
233 case 4:
egdanielbf63e612016-08-17 06:26:16 -0700234 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
Greg Daniel164a9f02016-02-22 09:56:40 -0500235 return true;
236 case 8:
egdanielbf63e612016-08-17 06:26:16 -0700237 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
Greg Daniel164a9f02016-02-22 09:56:40 -0500238 return true;
239 case 16:
egdanielbf63e612016-08-17 06:26:16 -0700240 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
Greg Daniel164a9f02016-02-22 09:56:40 -0500241 return true;
242 case 32:
egdanielbf63e612016-08-17 06:26:16 -0700243 *vkSamples = VK_SAMPLE_COUNT_32_BIT;
Greg Daniel164a9f02016-02-22 09:56:40 -0500244 return true;
245 case 64:
egdanielbf63e612016-08-17 06:26:16 -0700246 *vkSamples = VK_SAMPLE_COUNT_64_BIT;
Greg Daniel164a9f02016-02-22 09:56:40 -0500247 return true;
248 default:
249 return false;
250 }
251}