/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUniformHandler.h"
#include "glsl/GrGLSLProgramBuilder.h"

// To determine whether a current offset is aligned, we can just 'and' the lowest bits with the
// alignment mask. A value of 0 means aligned, any other value is how many bytes past alignment we
// are. This works since all alignments are powers of 2. The mask is always (alignment - 1).
// This alignment mask will give correct alignments for using the std430 block layout. If you want
// the std140 alignment, you can use this, but then make sure that any array type is aligned to
// 16 bytes (i.e. has a mask of 0xF).
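//
// A quick worked example of the mask arithmetic (no new behavior, just the math used below): a
// vec4 uniform (mask 0xF) added at a current offset of 20 gives 20 & 0xF = 4, i.e. 4 bytes past
// alignment, so we pad by 0xF - 4 + 1 = 12 bytes and place the uniform at offset 32.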
uint32_t grsltype_to_alignment_mask(GrSLType type) {
    SkASSERT(GrSLTypeIsFloatType(type));
    static const uint32_t kAlignmentMask[] = {
        0x0, // kVoid_GrSLType, should never return this
        0x3, // kFloat_GrSLType
        0x7, // kVec2f_GrSLType
        0xF, // kVec3f_GrSLType
        0xF, // kVec4f_GrSLType
        0x7, // kMat22f_GrSLType
        0xF, // kMat33f_GrSLType
        0xF, // kMat44f_GrSLType
        0x0, // kTexture2DSampler_GrSLType, should never return this
        0x0, // kTextureExternalSampler_GrSLType, should never return this
        0x0, // kTexture2DRectSampler_GrSLType, should never return this
        0x0, // kTextureBufferSampler_GrSLType, should never return this
        0x0, // kBool_GrSLType
        0x7, // kInt_GrSLType
        0x7, // kUint_GrSLType
        0x0, // kTexture2D_GrSLType, should never return this
        0x0, // kSampler_GrSLType, should never return this
    };
    GR_STATIC_ASSERT(0 == kVoid_GrSLType);
    GR_STATIC_ASSERT(1 == kFloat_GrSLType);
    GR_STATIC_ASSERT(2 == kVec2f_GrSLType);
    GR_STATIC_ASSERT(3 == kVec3f_GrSLType);
    GR_STATIC_ASSERT(4 == kVec4f_GrSLType);
    GR_STATIC_ASSERT(5 == kMat22f_GrSLType);
    GR_STATIC_ASSERT(6 == kMat33f_GrSLType);
    GR_STATIC_ASSERT(7 == kMat44f_GrSLType);
    GR_STATIC_ASSERT(8 == kTexture2DSampler_GrSLType);
    GR_STATIC_ASSERT(9 == kTextureExternalSampler_GrSLType);
    GR_STATIC_ASSERT(10 == kTexture2DRectSampler_GrSLType);
    GR_STATIC_ASSERT(11 == kTextureBufferSampler_GrSLType);
    GR_STATIC_ASSERT(12 == kBool_GrSLType);
    GR_STATIC_ASSERT(13 == kInt_GrSLType);
    GR_STATIC_ASSERT(14 == kUint_GrSLType);
    GR_STATIC_ASSERT(15 == kTexture2D_GrSLType);
    GR_STATIC_ASSERT(16 == kSampler_GrSLType);
    GR_STATIC_ASSERT(SK_ARRAY_COUNT(kAlignmentMask) == kGrSLTypeCount);
    return kAlignmentMask[type];
}

/** Returns the size in bytes taken up in Vulkan buffers for floating point GrSLTypes.
    For non-floating-point types this returns 0. Currently this reflects the std140 alignment,
    so a mat22 takes up 8 floats. */
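//
// (Background on that mat22 number, stated as a sketch of the std140 rules rather than anything
// this file defines: std140 lays a matrix out like an array of its column vectors and pads each
// column to vec4 stride, so a mat22 is 2 columns * 4 floats = 8 floats. std430 drops the vec4
// rounding, which is why the kMat22f entry below could shrink to 4 floats.)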
static inline uint32_t grsltype_to_vk_size(GrSLType type) {
    SkASSERT(GrSLTypeIsFloatType(type));
    static const uint32_t kSizes[] = {
        0,                   // kVoid_GrSLType
        sizeof(float),       // kFloat_GrSLType
        2 * sizeof(float),   // kVec2f_GrSLType
        3 * sizeof(float),   // kVec3f_GrSLType
        4 * sizeof(float),   // kVec4f_GrSLType
        8 * sizeof(float),   // kMat22f_GrSLType. TODO: this will be 4 * sizeof(float) on std430.
        12 * sizeof(float),  // kMat33f_GrSLType
        16 * sizeof(float),  // kMat44f_GrSLType
        0,                   // kTexture2DSampler_GrSLType
        0,                   // kTextureExternalSampler_GrSLType
        0,                   // kTexture2DRectSampler_GrSLType
        0,                   // kTextureBufferSampler_GrSLType
        1,                   // kBool_GrSLType
        4,                   // kInt_GrSLType
        4,                   // kUint_GrSLType
        0,                   // kTexture2D_GrSLType
        0,                   // kSampler_GrSLType
    };
    return kSizes[type];

    GR_STATIC_ASSERT(0 == kVoid_GrSLType);
    GR_STATIC_ASSERT(1 == kFloat_GrSLType);
    GR_STATIC_ASSERT(2 == kVec2f_GrSLType);
    GR_STATIC_ASSERT(3 == kVec3f_GrSLType);
    GR_STATIC_ASSERT(4 == kVec4f_GrSLType);
    GR_STATIC_ASSERT(5 == kMat22f_GrSLType);
    GR_STATIC_ASSERT(6 == kMat33f_GrSLType);
    GR_STATIC_ASSERT(7 == kMat44f_GrSLType);
    GR_STATIC_ASSERT(8 == kTexture2DSampler_GrSLType);
    GR_STATIC_ASSERT(9 == kTextureExternalSampler_GrSLType);
    GR_STATIC_ASSERT(10 == kTexture2DRectSampler_GrSLType);
    GR_STATIC_ASSERT(11 == kTextureBufferSampler_GrSLType);
    GR_STATIC_ASSERT(12 == kBool_GrSLType);
    GR_STATIC_ASSERT(13 == kInt_GrSLType);
    GR_STATIC_ASSERT(14 == kUint_GrSLType);
    GR_STATIC_ASSERT(15 == kTexture2D_GrSLType);
    GR_STATIC_ASSERT(16 == kSampler_GrSLType);
    GR_STATIC_ASSERT(SK_ARRAY_COUNT(kSizes) == kGrSLTypeCount);
}


// Given the current offset into the ubo, calculate the offset for the uniform we're trying to add
// taking into consideration all alignment requirements. The uniformOffset is set to the offset for
// the new uniform, and currentOffset is updated to be the offset to the end of the new uniform.
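//
// A small worked example of that bookkeeping (a sketch only, it adds no behavior): adding a
// kVec3f_GrSLType with arrayCount = 0 while *currentOffset is 4 rounds up to the next 16-byte
// boundary, so *uniformOffset becomes 16 and *currentOffset becomes 16 + 12 = 28. For an array,
// *currentOffset instead advances by arrayCount times the element size rounded up to a multiple
// of 16 bytes, per the std140 array rules.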
void get_ubo_aligned_offset(uint32_t* uniformOffset,
                            uint32_t* currentOffset,
                            GrSLType type,
                            int arrayCount) {
    uint32_t alignmentMask = grsltype_to_alignment_mask(type);
    // We want to use the std140 layout here, so we must make arrays and mat22s align to
    // 16 bytes.
    if (arrayCount || type == kMat22f_GrSLType) {
        alignmentMask = 0xF;
    }
    uint32_t offsetDiff = *currentOffset & alignmentMask;
    if (offsetDiff != 0) {
        offsetDiff = alignmentMask - offsetDiff + 1;
    }
    *uniformOffset = *currentOffset + offsetDiff;
    SkASSERT(sizeof(float) == 4);
    if (arrayCount) {
        uint32_t elementSize = SkTMax<uint32_t>(16, grsltype_to_vk_size(type));
        SkASSERT(0 == (elementSize & 0xF));
        *currentOffset = *uniformOffset + elementSize * arrayCount;
    } else {
        *currentOffset = *uniformOffset + grsltype_to_vk_size(type);
    }
}

GrGLSLUniformHandler::UniformHandle GrVkUniformHandler::internalAddUniformArray(
                                                                            uint32_t visibility,
                                                                            GrSLType type,
                                                                            GrSLPrecision precision,
                                                                            const char* name,
                                                                            bool mangleName,
                                                                            int arrayCount,
                                                                            const char** outName) {
    SkASSERT(name && strlen(name));
    SkDEBUGCODE(static const uint32_t kVisibilityMask = kVertex_GrShaderFlag|kFragment_GrShaderFlag);
    SkASSERT(0 == (~kVisibilityMask & visibility));
    SkASSERT(0 != visibility);
    SkASSERT(kDefault_GrSLPrecision == precision || GrSLTypeIsFloatType(type));

    UniformInfo& uni = fUniforms.push_back();
    uni.fVariable.setType(type);
    // TODO: this is a bit hacky, let's think of a better way. Basically we need to be able to use
    // the uniform view matrix name in the GP, and the GP is immutable so it has to tell the PB
    // exactly what name it wants to use for the uniform view matrix. If we prefix anything, then
    // the names will mismatch. I think the correct solution is to have all GPs that need the
    // uniform view matrix upload it in their setData along with their regular uniforms.
    char prefix = 'u';
    if ('u' == name[0]) {
        prefix = '\0';
    }
    fProgramBuilder->nameVariable(uni.fVariable.accessName(), prefix, name, mangleName);
    uni.fVariable.setArrayCount(arrayCount);
    // For now we assert that the visibility is either only vertex or only fragment.
    SkASSERT(kVertex_GrShaderFlag == visibility || kFragment_GrShaderFlag == visibility);
    uni.fVisibility = visibility;
    uni.fVariable.setPrecision(precision);
    // When outputting the GLSL, only the outer uniform block will get the uniform modifier. Thus
    // we set the modifier to none for all uniforms declared inside the block.
    uni.fVariable.setTypeModifier(GrGLSLShaderVar::kNone_TypeModifier);

    uint32_t* currentOffset = kVertex_GrShaderFlag == visibility ? &fCurrentVertexUBOOffset
                                                                 : &fCurrentFragmentUBOOffset;
    get_ubo_aligned_offset(&uni.fUBOffset, currentOffset, type, arrayCount);

    if (outName) {
        *outName = uni.fVariable.c_str();
    }

    return GrGLSLUniformHandler::UniformHandle(fUniforms.count() - 1);
}

GrGLSLUniformHandler::SamplerHandle GrVkUniformHandler::internalAddSampler(uint32_t visibility,
                                                                           GrPixelConfig config,
                                                                           GrSLType type,
                                                                           GrSLPrecision precision,
                                                                           const char* name) {
    SkASSERT(name && strlen(name));
    SkDEBUGCODE(static const uint32_t kVisMask = kVertex_GrShaderFlag | kFragment_GrShaderFlag);
    SkASSERT(0 == (~kVisMask & visibility));
    SkASSERT(0 != visibility);
    SkString mangleName;
    char prefix = 'u';
    fProgramBuilder->nameVariable(&mangleName, prefix, name, true);
    fSamplers.emplace_back(visibility, config, type, precision, mangleName.c_str(),
                           (uint32_t)fSamplers.count(), kSamplerDescSet);
    return GrGLSLUniformHandler::SamplerHandle(fSamplers.count() - 1);
}

void GrVkUniformHandler::appendUniformDecls(GrShaderFlags visibility, SkString* out) const {
    SkASSERT(kVertex_GrShaderFlag == visibility || kFragment_GrShaderFlag == visibility);

    for (int i = 0; i < fSamplers.count(); ++i) {
        const GrVkGLSLSampler& sampler = fSamplers[i];
        SkASSERT(sampler.type() == kTexture2DSampler_GrSLType);
        if (visibility == sampler.visibility()) {
            sampler.fShaderVar.appendDecl(fProgramBuilder->glslCaps(), out);
            out->append(";\n");
        }
    }

    SkString uniformsString;
    for (int i = 0; i < fUniforms.count(); ++i) {
        const UniformInfo& localUniform = fUniforms[i];
        if (visibility == localUniform.fVisibility) {
            if (GrSLTypeIsFloatType(localUniform.fVariable.getType())) {
                localUniform.fVariable.appendDecl(fProgramBuilder->glslCaps(), &uniformsString);
                uniformsString.append(";\n");
            }
        }
    }
    if (!uniformsString.isEmpty()) {
        uint32_t uniformBinding = (visibility == kVertex_GrShaderFlag) ? kVertexBinding
                                                                       : kFragBinding;
        const char* stage = (visibility == kVertex_GrShaderFlag) ? "vertex" : "fragment";
        out->appendf("layout (set=%d, binding=%d) uniform %sUniformBuffer\n{\n",
                     kUniformBufferDescSet, uniformBinding, stage);
        out->appendf("%s\n};\n", uniformsString.c_str());
    }
}