//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/renderer_utils.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

constexpr size_t kUniformBlockDynamicBufferMinSize = 256 * 128;

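// Computes the std140 layout for a shader's default uniform block and reports the total block
// size. An empty uniform list (or a zero-sized block) reports a size of zero so the caller can
// skip buffer allocation for that stage.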
void InitDefaultUniformBlock(const gl::Context *context,
                             gl::Shader *shader,
                             sh::BlockLayoutMap *blockLayoutMapOut,
                             size_t *blockSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *blockSizeOut = 0;
        return;
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *blockSizeOut = 0;
        return;
    }

    *blockSizeOut = blockSize;
    return;
}

template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               uint32_t arrayIndex,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    const int elementSize = sizeof(T) * componentCount;

    uint8_t *dst = uniformData->data() + layoutInfo.offset;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint32_t arrayOffset = arrayIndex * layoutInfo.arrayStride;
        uint8_t *writePtr    = dst + arrayOffset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        int maxIndex = arrayIndex + count;
        for (int writeIndex = arrayIndex, readIndex = 0; writeIndex < maxIndex;
             writeIndex++, readIndex++)
        {
            const int arrayOffset = writeIndex * layoutInfo.arrayStride;
            uint8_t *writePtr     = dst + arrayOffset;
            const T *readPtr      = v + (readIndex * componentCount);
            memcpy(writePtr, readPtr, elementSize);
        }
    }
}

template <typename T>
void ReadFromDefaultUniformBlock(int componentCount,
                                 uint32_t arrayIndex,
                                 T *dst,
                                 const sh::BlockMemberInfo &layoutInfo,
                                 const angle::MemoryBuffer *uniformData)
{
    ASSERT(layoutInfo.offset != -1);

    const int elementSize = sizeof(T) * componentCount;
    const uint8_t *source = uniformData->data() + layoutInfo.offset;

    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        const uint8_t *readPtr = source + arrayIndex * layoutInfo.arrayStride;
        memcpy(dst, readPtr, elementSize);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        const int arrayOffset  = arrayIndex * layoutInfo.arrayStride;
        const uint8_t *readPtr = source + arrayOffset;
        memcpy(dst, readPtr, elementSize);
    }
}

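// Copies the CPU-side uniform data into the stage's dynamic buffer and flushes it. The returned
// offset is used as the dynamic offset when binding the uniforms descriptor set, and
// *outBufferModified reports whether the allocation required a new underlying buffer.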
angle::Result SyncDefaultUniformBlock(ContextVk *contextVk,
                                      vk::DynamicBuffer *dynamicBuffer,
                                      const angle::MemoryBuffer &bufferData,
                                      uint32_t *outOffset,
                                      bool *outBufferModified)
{
    ASSERT(!bufferData.empty());
    uint8_t *data       = nullptr;
    VkBuffer *outBuffer = nullptr;
    uint32_t offset;
    ANGLE_TRY(dynamicBuffer->allocate(contextVk, bufferData.size(), &data, outBuffer, &offset,
                                      outBufferModified));
    *outOffset = offset;
    memcpy(data, bufferData.data(), bufferData.size());
    ANGLE_TRY(dynamicBuffer->flush(contextVk));
    return angle::Result::Continue();
}
}  // anonymous namespace

ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
              kUniformBlockDynamicBufferMinSize),
      uniformData(),
      uniformsDirty(false),
      uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state),
      mDefaultUniformBlocks(),
      mUniformBlocksOffsets(),
      mUsedDescriptorSetRange(),
      mDirtyTextures(true)
{
    mUniformBlocksOffsets.fill(0);
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk()
{
}

gl::Error ProgramVk::destroy(const gl::Context *contextImpl)
{
    ContextVk *contextVk = vk::GetImpl(contextImpl);
    return reset(contextVk);
}

angle::Result ProgramVk::reset(ContextVk *contextVk)
{
    VkDevice device = contextVk->getDevice();

    for (auto &descriptorSetLayout : mDescriptorSetLayouts)
    {
        descriptorSetLayout.reset();
    }
    mPipelineLayout.reset();

    RendererVk *renderer = contextVk->getRenderer();
    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.release(renderer);
    }

    Serial currentSerial = renderer->getCurrentQueueSerial();
    renderer->releaseObject(currentSerial, &mEmptyUniformBlockStorage.memory);
    renderer->releaseObject(currentSerial, &mEmptyUniformBlockStorage.buffer);

    mDefaultVertexShaderAndSerial.destroy(device);
    mDefaultFragmentShaderAndSerial.destroy(device);

    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;

    return angle::Result::Continue();
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

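// Linking converts the translated GLSL into SPIR-V via GlslangWrapper, builds the shader
// modules, initializes the default uniform block storage, and caches the descriptor set
// layouts and pipeline layout used by this program's pipelines.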
gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::ProgramLinkedResources &resources,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();

    ANGLE_TRY(reset(contextVk));

    std::string vertexSource;
    std::string fragmentSource;
    GlslangWrapper::GetShaderSource(glContext, mState, resources, &vertexSource, &fragmentSource);

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(GlslangWrapper::GetShaderCode(glContext->getCaps(), vertexSource,
                                                   fragmentSource, &vertexCode, &fragmentCode),
                     linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    ANGLE_TRY(vk::InitShaderAndSerial(contextVk, &mDefaultVertexShaderAndSerial, vertexCode.data(),
                                      vertexCode.size() * sizeof(uint32_t)));
    ANGLE_TRY(vk::InitShaderAndSerial(contextVk, &mDefaultFragmentShaderAndSerial,
                                      fragmentCode.data(), fragmentCode.size() * sizeof(uint32_t)));

    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(kTextureDescriptorSetIndex);
        mDirtyTextures = true;
    }

    // Store a reference to the pipeline and descriptor set layouts. This will create them if they
    // don't already exist in the cache.
    vk::DescriptorSetLayoutDesc uniformsSetDesc;
    uniformsSetDesc.update(kVertexUniformsBindingIndex, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                           1);
    uniformsSetDesc.update(kFragmentUniformsBindingIndex, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                           1);

    ANGLE_TRY(renderer->getDescriptorSetLayout(
        contextVk, uniformsSetDesc, &mDescriptorSetLayouts[kUniformsDescriptorSetIndex]));

    vk::DescriptorSetLayoutDesc texturesSetDesc;

    for (uint32_t textureIndex = 0; textureIndex < mState.getSamplerBindings().size();
         ++textureIndex)
    {
        const gl::SamplerBinding &samplerBinding = mState.getSamplerBindings()[textureIndex];

        // The front-end always binds array sampler units sequentially.
        const uint32_t count = static_cast<uint32_t>(samplerBinding.boundTextureUnits.size());
        texturesSetDesc.update(textureIndex, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, count);
    }

    ANGLE_TRY(renderer->getDescriptorSetLayout(contextVk, texturesSetDesc,
                                               &mDescriptorSetLayouts[kTextureDescriptorSetIndex]));

    vk::DescriptorSetLayoutDesc driverUniformsSetDesc;
    driverUniformsSetDesc.update(0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1);
    ANGLE_TRY(renderer->getDescriptorSetLayout(
        contextVk, driverUniformsSetDesc,
        &mDescriptorSetLayouts[kDriverUniformsDescriptorSetIndex]));

    vk::PipelineLayoutDesc pipelineLayoutDesc;
    pipelineLayoutDesc.updateDescriptorSetLayout(kUniformsDescriptorSetIndex, uniformsSetDesc);
    pipelineLayoutDesc.updateDescriptorSetLayout(kTextureDescriptorSetIndex, texturesSetDesc);
    pipelineLayoutDesc.updateDescriptorSetLayout(kDriverUniformsDescriptorSetIndex,
                                                 driverUniformsSetDesc);

    ANGLE_TRY(renderer->getPipelineLayout(contextVk, pipelineLayoutDesc, mDescriptorSetLayouts,
                                          &mPipelineLayout));

    return true;
}

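// Sets up the per-stage default uniform block data: a std140 layout entry for every uniform
// location, a zero-filled CPU shadow buffer sized for each stage, and (when only one stage has
// default uniforms) a minimal "empty" buffer backing the stage that has none.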
gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();

    // Process vertex and fragment uniforms into std140 packing.
    vk::ShaderMap<sh::BlockLayoutMap> layoutMap;
    vk::ShaderMap<size_t> requiredBufferSize;
    requiredBufferSize.fill(0);

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        gl::ShaderType glShaderType = static_cast<gl::ShaderType>(shaderType);
        InitDefaultUniformBlock(glContext, mState.getAttachedShader(glShaderType),
                                &layoutMap[shaderType], &requiredBufferSize[shaderType]);
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms  = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        vk::ShaderMap<sh::BlockMemberInfo> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                // Gets the uniform name without the [0] at the end.
                uniformName = gl::ParseResourceName(uniformName, nullptr);
            }

            bool found = false;

            for (vk::ShaderType shaderType : vk::AllShaderTypes())
            {
                auto it = layoutMap[shaderType].find(uniformName);
                if (it != layoutMap[shaderType].end())
                {
                    found                  = true;
                    layoutInfo[shaderType] = it->second;
                }
            }

            ASSERT(found);
        }

        for (vk::ShaderType shaderType : vk::AllShaderTypes())
        {
            mDefaultUniformBlocks[shaderType].uniformLayout.push_back(layoutInfo[shaderType]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        if (requiredBufferSize[shaderType] > 0)
        {
            if (!mDefaultUniformBlocks[shaderType].uniformData.resize(
                    requiredBufferSize[shaderType]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            size_t minAlignment = static_cast<size_t>(
                renderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);

            mDefaultUniformBlocks[shaderType].storage.init(minAlignment, renderer);

            // Initialize uniform buffer memory to zero by default.
            mDefaultUniformBlocks[shaderType].uniformData.fill(0);
            mDefaultUniformBlocks[shaderType].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext                 = nullptr;
            uniformBufferInfo.flags                 = 0;
            uniformBufferInfo.size                  = 1;
            uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices   = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(contextVk, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            ANGLE_TRY(AllocateBufferMemory(contextVk, flags, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory));
        }

        // Ensure the descriptor set range includes the uniform buffers at position 0.
        mUsedDescriptorSetRange.extend(0);
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    // No-op. The spec is very vague about the behavior of validation.
    return GL_TRUE;
}

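// Shared implementation of the glUniform* entry points. Values are written into each stage's
// CPU shadow buffer and flushed to GPU memory later in updateUniforms(). The second branch
// converts float/int data targeting boolean uniforms into GLint GL_TRUE/GL_FALSE values.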
template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        // We could potentially cache some indexing here. For now this is a no-op since the mapping
        // is handled entirely in ContextVk.
        return;
    }

    if (linkedUniform.typeInfo->type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;
            UpdateDefaultUniformBlock(count, locationInfo.arrayIndex, componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);
            uniformBlock.uniformsDirty = true;
        }
    }
    else
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;

            ASSERT(linkedUniform.typeInfo->type == gl::VariableBoolVectorType(entryPointType));

            GLint initialArrayOffset =
                locationInfo.arrayIndex * layoutInfo.arrayStride + layoutInfo.offset;
            for (GLint i = 0; i < count; i++)
            {
                GLint elementOffset = i * layoutInfo.arrayStride + initialArrayOffset;
                GLint *dest =
                    reinterpret_cast<GLint *>(uniformBlock.uniformData.data() + elementOffset);
                const T *source = v + i * componentCount;

                for (int c = 0; c < componentCount; c++)
                {
                    dest[c] = (source[c] == static_cast<T>(0)) ? GL_FALSE : GL_TRUE;
                }
            }
            uniformBlock.uniformsDirty = true;
        }
    }
}

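// Shared implementation of the glGetUniform* queries. Reads straight from the CPU shadow
// buffer of the first stage where the uniform is active, so no GPU readback is needed.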
template <typename T>
void ProgramVk::getUniformImpl(GLint location, T *v, GLenum entryPointType) const
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    ASSERT(!linkedUniform.isSampler());

    const gl::ShaderType shaderType = linkedUniform.getFirstShaderTypeWhereActive();
    ASSERT(shaderType != gl::ShaderType::InvalidEnum);

    const DefaultUniformBlock &uniformBlock =
        mDefaultUniformBlocks[static_cast<vk::ShaderType>(shaderType)];
    const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

    ASSERT(linkedUniform.typeInfo->componentType == entryPointType ||
           linkedUniform.typeInfo->componentType == gl::VariableBoolVectorType(entryPointType));

    if (gl::IsMatrixType(linkedUniform.type))
    {
        const uint8_t *ptrToElement = uniformBlock.uniformData.data() + layoutInfo.offset +
                                      (locationInfo.arrayIndex * layoutInfo.arrayStride);
        GetMatrixUniform(linkedUniform.type, v, reinterpret_cast<const T *>(ptrToElement), false);
    }
    else
    {
        ReadFromDefaultUniformBlock(linkedUniform.typeInfo->componentCount, locationInfo.arrayIndex,
                                    v, layoutInfo, &uniformBlock.uniformData);
    }
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT);
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC2);
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC3);
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC4);
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

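// Shared implementation of the glUniformMatrix* entry points. SetFloatUniformMatrix (from
// renderer_utils) expands the data into the shadow buffer and reports whether any values
// actually changed, so redundant updates do not re-dirty the block.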
template <int cols, int rows>
void ProgramVk::setUniformMatrixfv(GLint location,
                                   GLsizei count,
                                   GLboolean transpose,
                                   const GLfloat *value)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

        // Assume an offset of -1 means the block is unused.
        if (layoutInfo.offset == -1)
        {
            continue;
        }

        bool updated = SetFloatUniformMatrix<cols, rows>(
            locationInfo.arrayIndex, linkedUniform.getArraySizeProduct(), count, transpose, value,
            uniformBlock.uniformData.data() + layoutInfo.offset);

        // If the uniformsDirty flag was true, we don't want to flip it to false here if the
        // setter did not update any data. We still want the uniform to be included when we
        // update the descriptor sets.
        uniformBlock.uniformsDirty = uniformBlock.uniformsDirty || updated;
    }
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<2, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<3, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<4, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

gl::Error ProgramVk::initShaders(const ContextVk *contextVk,
                                 const gl::DrawCallParams &drawCallParams,
                                 const vk::ShaderAndSerial **vertexShaderAndSerialOut,
                                 const vk::ShaderAndSerial **fragmentShaderAndSerialOut)
{
    // TODO(jmadill): Move more init into this method. http://anglebug.com/2598
    // TODO(jmadill): Line rasterization emulation shaders. http://anglebug.com/2598
    ASSERT(mDefaultVertexShaderAndSerial.valid());
    ASSERT(mDefaultFragmentShaderAndSerial.valid());
    *vertexShaderAndSerialOut   = &mDefaultVertexShaderAndSerial;
    *fragmentShaderAndSerialOut = &mDefaultFragmentShaderAndSerial;
    return gl::NoError();
}

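// Allocates a fresh descriptor set for the given set index from the context's dynamic
// descriptor pool, growing mDescriptorSets as needed. Callers allocate a new set rather than
// rewriting one that may already be referenced by recorded commands.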
angle::Result ProgramVk::allocateDescriptorSet(ContextVk *contextVk, uint32_t descriptorSetIndex)
{
    // Write out to a new descriptor set.
    vk::DynamicDescriptorPool *dynamicDescriptorPool =
        contextVk->getDynamicDescriptorPool(descriptorSetIndex);

    uint32_t potentialNewCount = descriptorSetIndex + 1;
    if (potentialNewCount > mDescriptorSets.size())
    {
        mDescriptorSets.resize(potentialNewCount, VK_NULL_HANDLE);
    }

    const vk::DescriptorSetLayout &descriptorSetLayout =
        mDescriptorSetLayouts[descriptorSetIndex].get();
    ANGLE_TRY(dynamicDescriptorPool->allocateSets(contextVk, descriptorSetLayout.ptr(), 1,
                                                  &mDescriptorSets[descriptorSetIndex]));
    return angle::Result::Continue();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    getUniformImpl(location, params, GL_FLOAT);
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    getUniformImpl(location, params, GL_INT);
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

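// Streams any dirty default uniform data into the per-stage dynamic buffers. If an allocation
// landed in a newly created buffer, the uniforms descriptor set is re-allocated and re-written
// to point at the new buffers, since the existing set is not modified once initialized.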
angle::Result ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[vk::ShaderType::VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[vk::ShaderType::FragmentShader].uniformsDirty)
    {
        return angle::Result::Continue();
    }

    ASSERT(mUsedDescriptorSetRange.contains(0));

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    bool anyNewBufferAllocated = false;
    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        DefaultUniformBlock &uniformBlock = mDefaultUniformBlocks[shaderType];

        if (uniformBlock.uniformsDirty)
        {
            bool bufferModified = false;
            ANGLE_TRY(SyncDefaultUniformBlock(contextVk, &uniformBlock.storage,
                                              uniformBlock.uniformData,
                                              &mUniformBlocksOffsets[shaderType], &bufferModified));
            uniformBlock.uniformsDirty = false;

            if (bufferModified)
            {
                anyNewBufferAllocated = true;
            }
        }
    }

    if (anyNewBufferAllocated)
    {
        // We need to reinitialize the descriptor sets if we newly allocated buffers since we can't
        // modify the descriptor sets once initialized.
        ANGLE_TRY(allocateDescriptorSet(contextVk, kUniformsDescriptorSetIndex));
        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }

    return angle::Result::Continue();
}

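// Points the uniforms descriptor set (set 0) at each stage's current buffer. A stage with no
// default uniform data is bound to the shared empty buffer so every binding in the set layout
// stays populated.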
angle::Result ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    vk::ShaderMap<VkDescriptorBufferInfo> descriptorBufferInfo;
    vk::ShaderMap<VkWriteDescriptorSet> writeDescriptorInfo;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        auto &uniformBlock = mDefaultUniformBlocks[shaderType];
        auto &bufferInfo   = descriptorBufferInfo[shaderType];
        auto &writeInfo    = writeDescriptorInfo[shaderType];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.getCurrentBufferHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range  = VK_WHOLE_SIZE;

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = mDescriptorSets[0];
        writeInfo.dstBinding       = static_cast<uint32_t>(shaderType);
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        writeInfo.pImageInfo       = nullptr;
        writeInfo.pBufferInfo      = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, 2, writeDescriptorInfo.data(), 0, nullptr);

    return angle::Result::Continue();
}

const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

const uint32_t *ProgramVk::getDynamicOffsets()
{
    // If we have no descriptor set being used, we do not need to specify any offsets when binding
    // the descriptor sets.
    if (!mUsedDescriptorSetRange.contains(0))
        return nullptr;

    return mUniformBlocksOffsets.data();
}

uint32_t ProgramVk::getDynamicOffsetsCount()
{
    if (!mUsedDescriptorSetRange.contains(0))
        return 0;

    return static_cast<uint32_t>(mUniformBlocksOffsets.size());
}

const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}

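// Rebuilds the textures descriptor set (set 1) from the context's active textures when the
// bound textures have changed. Array samplers occupy consecutive array elements of a single
// binding, matching the set layout built at link time.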
angle::Result ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return angle::Result::Continue();
    }

    ANGLE_TRY(allocateDescriptorSet(contextVk, kTextureDescriptorSetIndex));

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[kTextureDescriptorSetIndex];

    gl::ActiveTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    gl::ActiveTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t writeCount = 0;

    const gl::ActiveTextureArray<TextureVk *> &activeTextures = contextVk->getActiveTextures();

    for (uint32_t textureIndex = 0; textureIndex < mState.getSamplerBindings().size();
         ++textureIndex)
    {
        const gl::SamplerBinding &samplerBinding = mState.getSamplerBindings()[textureIndex];

        ASSERT(!samplerBinding.unreferenced);

        for (uint32_t arrayElement = 0; arrayElement < samplerBinding.boundTextureUnits.size();
             ++arrayElement)
        {
            GLuint textureUnit           = samplerBinding.boundTextureUnits[arrayElement];
            TextureVk *textureVk         = activeTextures[textureUnit];
            const vk::ImageHelper &image = textureVk->getImage();

            VkDescriptorImageInfo &imageInfo = descriptorImageInfo[writeCount];

            imageInfo.sampler     = textureVk->getSampler().getHandle();
            imageInfo.imageView   = textureVk->getImageView().getHandle();
            imageInfo.imageLayout = image.getCurrentLayout();

            VkWriteDescriptorSet &writeInfo = writeDescriptorInfo[writeCount];

            writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            writeInfo.pNext            = nullptr;
            writeInfo.dstSet           = descriptorSet;
            writeInfo.dstBinding       = textureIndex;
            writeInfo.dstArrayElement  = arrayElement;
            writeInfo.descriptorCount  = 1;
            writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            writeInfo.pImageInfo       = &imageInfo;
            writeInfo.pBufferInfo      = nullptr;
            writeInfo.pTexelBufferView = nullptr;

            writeCount++;
        }
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(writeCount > 0);
    vkUpdateDescriptorSets(device, writeCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
    return angle::Result::Continue();
}

void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}

const vk::PipelineLayout &ProgramVk::getPipelineLayout() const
{
    return mPipelineLayout.get();
}

void ProgramVk::setDefaultUniformBlocksMinSizeForTesting(size_t minSize)
{
    for (DefaultUniformBlock &block : mDefaultUniformBlocks)
    {
        block.storage.setMinimumSizeForTesting(minSize);
    }
}
}  // namespace rx