//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/renderer_utils.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

constexpr size_t kUniformBlockDynamicBufferMinSize = 256 * 128;

gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  gl::Shader *shader,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *blockSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    *blockSizeOut = blockSize;
    return gl::NoError();
}

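// Note: the copy helpers below work against the std140 layout computed at link time. When the
// array stride matches the tightly packed element size the data can be copied with a single
// memcpy; otherwise each array element is copied individually to honor the stride.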
template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               uint32_t arrayIndex,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    const int elementSize = sizeof(T) * componentCount;

    uint8_t *dst = uniformData->data() + layoutInfo.offset;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint32_t arrayOffset = arrayIndex * layoutInfo.arrayStride;
        uint8_t *writePtr = dst + arrayOffset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        int maxIndex = arrayIndex + count;
        for (int writeIndex = arrayIndex, readIndex = 0; writeIndex < maxIndex;
             writeIndex++, readIndex++)
        {
            const int arrayOffset = writeIndex * layoutInfo.arrayStride;
            uint8_t *writePtr = dst + arrayOffset;
            const T *readPtr = v + (readIndex * componentCount);
            memcpy(writePtr, readPtr, elementSize);
        }
    }
}

template <typename T>
void ReadFromDefaultUniformBlock(int componentCount,
                                 uint32_t arrayIndex,
                                 T *dst,
                                 const sh::BlockMemberInfo &layoutInfo,
                                 const angle::MemoryBuffer *uniformData)
{
    ASSERT(layoutInfo.offset != -1);

    const int elementSize = sizeof(T) * componentCount;
    const uint8_t *source = uniformData->data() + layoutInfo.offset;

    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        const uint8_t *readPtr = source + arrayIndex * layoutInfo.arrayStride;
        memcpy(dst, readPtr, elementSize);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        const int arrayOffset = arrayIndex * layoutInfo.arrayStride;
        const uint8_t *readPtr = source + arrayOffset;
        memcpy(dst, readPtr, elementSize);
    }
}

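// Copies a dirty default uniform block into a fresh allocation from the dynamic buffer and
// flushes it, returning the dynamic offset to use when binding the descriptor set.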
vk::Error SyncDefaultUniformBlock(RendererVk *renderer,
                                  vk::DynamicBuffer *dynamicBuffer,
                                  const angle::MemoryBuffer &bufferData,
                                  uint32_t *outOffset,
                                  bool *outBufferModified)
{
    ASSERT(!bufferData.empty());
    uint8_t *data = nullptr;
    VkBuffer *outBuffer = nullptr;
    uint32_t offset;
    ANGLE_TRY(dynamicBuffer->allocate(renderer, bufferData.size(), &data, outBuffer, &offset,
                                      outBufferModified));
    *outOffset = offset;
    memcpy(data, bufferData.data(), bufferData.size());
    ANGLE_TRY(dynamicBuffer->flush(renderer->getDevice()));
    return vk::NoError();
}
}  // anonymous namespace

ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
              kUniformBlockDynamicBufferMinSize),
      uniformData(),
      uniformsDirty(false),
      uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state),
      mDefaultUniformBlocks(),
      mUniformBlocksOffsets(),
      mUsedDescriptorSetRange(),
      mDirtyTextures(true)
{
    mUniformBlocksOffsets.fill(0);
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk()
{
}

gl::Error ProgramVk::destroy(const gl::Context *contextImpl)
{
    ContextVk *contextVk = vk::GetImpl(contextImpl);
    return reset(contextVk);
}

vk::Error ProgramVk::reset(ContextVk *contextVk)
{
    VkDevice device = contextVk->getDevice();

    for (auto &descriptorSetLayout : mDescriptorSetLayouts)
    {
        descriptorSetLayout.reset();
    }
    mPipelineLayout.reset();

    RendererVk *renderer = contextVk->getRenderer();
    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.release(renderer);
    }

    Serial currentSerial = renderer->getCurrentQueueSerial();
    renderer->releaseObject(currentSerial, &mEmptyUniformBlockStorage.memory);
    renderer->releaseObject(currentSerial, &mEmptyUniformBlockStorage.buffer);

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    mVertexModuleSerial = Serial();
    mFragmentModuleSerial = Serial();

    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;

    return vk::NoError();
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

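// link() runs the shaders through GlslangWrapper to produce SPIR-V modules, then builds the
// pipeline layout: one descriptor set for the vertex/fragment default uniform buffers
// (kUniformsDescriptorSetIndex), one for textures (kTextureDescriptorSetIndex), and one for
// driver uniforms (kDriverUniformsDescriptorSetIndex).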
gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::ProgramLinkedResources &resources,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
    VkDevice device = renderer->getDevice();

    ANGLE_TRY(reset(contextVk));

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(glslangWrapper->linkProgram(glContext, mState, resources, glContext->getCaps(),
                                                 &vertexCode, &fragmentCode),
                     linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    {
        VkShaderModuleCreateInfo vertexShaderInfo;
        vertexShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        vertexShaderInfo.pNext = nullptr;
        vertexShaderInfo.flags = 0;
        vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
        vertexShaderInfo.pCode = vertexCode.data();

        ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
        mVertexModuleSerial = renderer->issueShaderSerial();
    }

    {
        VkShaderModuleCreateInfo fragmentShaderInfo;
        fragmentShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        fragmentShaderInfo.pNext = nullptr;
        fragmentShaderInfo.flags = 0;
        fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
        fragmentShaderInfo.pCode = fragmentCode.data();

        ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
        mFragmentModuleSerial = renderer->issueShaderSerial();
    }

    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(kTextureDescriptorSetIndex);
        mDirtyTextures = true;
    }

    // Store a reference to the pipeline and descriptor set layouts. This will create them if they
    // don't already exist in the cache.
    vk::DescriptorSetLayoutDesc uniformsSetDesc;
    uniformsSetDesc.update(kVertexUniformsBindingIndex, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                           1);
    uniformsSetDesc.update(kFragmentUniformsBindingIndex, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                           1);

    ANGLE_TRY(renderer->getDescriptorSetLayout(
        uniformsSetDesc, &mDescriptorSetLayouts[kUniformsDescriptorSetIndex]));

    vk::DescriptorSetLayoutDesc texturesSetDesc;

    for (uint32_t textureIndex = 0; textureIndex < mState.getSamplerBindings().size();
         ++textureIndex)
    {
        const gl::SamplerBinding &samplerBinding = mState.getSamplerBindings()[textureIndex];

        // The front-end always binds array sampler units sequentially.
        const uint32_t count = static_cast<uint32_t>(samplerBinding.boundTextureUnits.size());
        texturesSetDesc.update(textureIndex, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, count);
    }

    ANGLE_TRY(renderer->getDescriptorSetLayout(texturesSetDesc,
                                               &mDescriptorSetLayouts[kTextureDescriptorSetIndex]));

    vk::DescriptorSetLayoutDesc driverUniformsSetDesc;
    driverUniformsSetDesc.update(0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1);
    ANGLE_TRY(renderer->getDescriptorSetLayout(
        driverUniformsSetDesc, &mDescriptorSetLayouts[kDriverUniformsDescriptorSetIndex]));

    vk::PipelineLayoutDesc pipelineLayoutDesc;
    pipelineLayoutDesc.updateDescriptorSetLayout(kUniformsDescriptorSetIndex, uniformsSetDesc);
    pipelineLayoutDesc.updateDescriptorSetLayout(kTextureDescriptorSetIndex, texturesSetDesc);
    pipelineLayoutDesc.updateDescriptorSetLayout(kDriverUniformsDescriptorSetIndex,
                                                 driverUniformsSetDesc);

    ANGLE_TRY(
        renderer->getPipelineLayout(pipelineLayoutDesc, mDescriptorSetLayouts, &mPipelineLayout));

    return true;
}

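// Builds the std140 layout for each stage's default uniform block and records, per uniform
// location, the block member info for both stages so the setUniform* entry points can patch each
// stage's CPU-side copy of the data.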
gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device = contextVk->getDevice();

    // Process vertex and fragment uniforms into std140 packing.
    vk::ShaderMap<sh::BlockLayoutMap> layoutMap;
    vk::ShaderMap<size_t> requiredBufferSize;
    requiredBufferSize.fill(0);

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        gl::ShaderType glShaderType = static_cast<gl::ShaderType>(shaderType);
        ANGLE_TRY(InitDefaultUniformBlock(glContext, mState.getAttachedShader(glShaderType),
                                          &layoutMap[shaderType], &requiredBufferSize[shaderType]));
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        vk::ShaderMap<sh::BlockMemberInfo> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                // Gets the uniform name without the [0] at the end.
                uniformName = gl::ParseResourceName(uniformName, nullptr);
            }

            bool found = false;

            for (vk::ShaderType shaderType : vk::AllShaderTypes())
            {
                auto it = layoutMap[shaderType].find(uniformName);
                if (it != layoutMap[shaderType].end())
                {
                    found = true;
                    layoutInfo[shaderType] = it->second;
                }
            }

            ASSERT(found);
        }

        for (vk::ShaderType shaderType : vk::AllShaderTypes())
        {
            mDefaultUniformBlocks[shaderType].uniformLayout.push_back(layoutInfo[shaderType]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        if (requiredBufferSize[shaderType] > 0)
        {
            if (!mDefaultUniformBlocks[shaderType].uniformData.resize(
                    requiredBufferSize[shaderType]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            size_t minAlignment = static_cast<size_t>(
                renderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);

            mDefaultUniformBlocks[shaderType].storage.init(minAlignment, renderer);

            // Initialize uniform buffer memory to zero by default.
            mDefaultUniformBlocks[shaderType].uniformData.fill(0);
            mDefaultUniformBlocks[shaderType].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext = nullptr;
            uniformBufferInfo.flags = 0;
            uniformBufferInfo.size = 1;
            uniformBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory));
        }

        // Ensure the descriptor set range includes the uniform buffers at position 0.
        mUsedDescriptorSetRange.extend(0);
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    // No-op. The spec is very vague about the behavior of validation.
    return GL_TRUE;
}

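// Common implementation for the scalar/vector uniform setters. When the GL entry point type
// matches the uniform's declared type the data is copied directly into each stage's block;
// otherwise the uniform is a bool variant and each component is converted to GL_TRUE/GL_FALSE
// as it is written.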
template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        // We could potentially cache some indexing here. For now this is a no-op since the mapping
        // is handled entirely in ContextVk.
        return;
    }

    if (linkedUniform.typeInfo->type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;
            UpdateDefaultUniformBlock(count, locationInfo.arrayIndex, componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);
            uniformBlock.uniformsDirty = true;
        }
    }
    else
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;

            ASSERT(linkedUniform.typeInfo->type == gl::VariableBoolVectorType(entryPointType));

            GLint initialArrayOffset =
                locationInfo.arrayIndex * layoutInfo.arrayStride + layoutInfo.offset;
            for (GLint i = 0; i < count; i++)
            {
                GLint elementOffset = i * layoutInfo.arrayStride + initialArrayOffset;
                GLint *dest =
                    reinterpret_cast<GLint *>(uniformBlock.uniformData.data() + elementOffset);
                const T *source = v + i * componentCount;

                for (int c = 0; c < componentCount; c++)
                {
                    dest[c] = (source[c] == static_cast<T>(0)) ? GL_FALSE : GL_TRUE;
                }
            }
            uniformBlock.uniformsDirty = true;
        }
    }
}

template <typename T>
void ProgramVk::getUniformImpl(GLint location, T *v, GLenum entryPointType) const
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    ASSERT(!linkedUniform.isSampler());

    const gl::ShaderType shaderType = linkedUniform.getFirstShaderTypeWhereActive();
    ASSERT(shaderType != gl::ShaderType::InvalidEnum);

    const DefaultUniformBlock &uniformBlock =
        mDefaultUniformBlocks[static_cast<vk::ShaderType>(shaderType)];
    const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

    ASSERT(linkedUniform.typeInfo->componentType == entryPointType ||
           linkedUniform.typeInfo->componentType == gl::VariableBoolVectorType(entryPointType));

    if (gl::IsMatrixType(linkedUniform.type))
    {
        const uint8_t *ptrToElement = uniformBlock.uniformData.data() + layoutInfo.offset +
                                      (locationInfo.arrayIndex * layoutInfo.arrayStride);
        GetMatrixUniform(linkedUniform.type, v, reinterpret_cast<const T *>(ptrToElement), false);
    }
    else
    {
        ReadFromDefaultUniformBlock(linkedUniform.typeInfo->componentCount, locationInfo.arrayIndex,
                                    v, layoutInfo, &uniformBlock.uniformData);
    }
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT);
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC2);
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC3);
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC4);
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

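// Shared implementation for all the matrix uniform setters. SetFloatUniformMatrix reports whether
// any data actually changed, so a redundant write does not clear an existing dirty flag.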
template <int cols, int rows>
void ProgramVk::setUniformMatrixfv(GLint location,
                                   GLsizei count,
                                   GLboolean transpose,
                                   const GLfloat *value)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

        // Assume an offset of -1 means the block is unused.
        if (layoutInfo.offset == -1)
        {
            continue;
        }

        bool updated = SetFloatUniformMatrix<cols, rows>(
            locationInfo.arrayIndex, linkedUniform.getArraySizeProduct(), count, transpose, value,
            uniformBlock.uniformData.data() + layoutInfo.offset);

        // If the uniformsDirty flag was already true, we don't want to flip it to false here just
        // because the setter did not update any data. The uniform still needs to be included when
        // we update the descriptor sets.
        uniformBlock.uniformsDirty = uniformBlock.uniformsDirty || updated;
    }
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<2, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<3, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<4, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

Serial ProgramVk::getVertexModuleSerial() const
{
    return mVertexModuleSerial;
}

const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

Serial ProgramVk::getFragmentModuleSerial() const
{
    return mFragmentModuleSerial;
}

vk::Error ProgramVk::allocateDescriptorSet(ContextVk *contextVk, uint32_t descriptorSetIndex)
{
    // Write out to a new descriptor set.
    vk::DynamicDescriptorPool *dynamicDescriptorPool =
        contextVk->getDynamicDescriptorPool(descriptorSetIndex);

    uint32_t potentialNewCount = descriptorSetIndex + 1;
    if (potentialNewCount > mDescriptorSets.size())
    {
        mDescriptorSets.resize(potentialNewCount, VK_NULL_HANDLE);
    }

    const vk::DescriptorSetLayout &descriptorSetLayout =
        mDescriptorSetLayouts[descriptorSetIndex].get();
    ANGLE_TRY(dynamicDescriptorPool->allocateSets(contextVk, descriptorSetLayout.ptr(), 1,
                                                  &mDescriptorSets[descriptorSetIndex]));
    return vk::NoError();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    getUniformImpl(location, params, GL_FLOAT);
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    getUniformImpl(location, params, GL_INT);
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

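// Re-uploads any dirty default uniform block into its dynamic buffer. If that allocation landed
// in a new VkBuffer, the uniform descriptor set is re-allocated and re-written, since descriptor
// sets are not modified here once initialized.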
vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[vk::ShaderType::VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[vk::ShaderType::FragmentShader].uniformsDirty)
    {
        return vk::NoError();
    }

    ASSERT(mUsedDescriptorSetRange.contains(0));

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    bool anyNewBufferAllocated = false;
    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        DefaultUniformBlock &uniformBlock = mDefaultUniformBlocks[shaderType];

        if (uniformBlock.uniformsDirty)
        {
            bool bufferModified = false;
            ANGLE_TRY(SyncDefaultUniformBlock(contextVk->getRenderer(), &uniformBlock.storage,
                                              uniformBlock.uniformData,
                                              &mUniformBlocksOffsets[shaderType], &bufferModified));
            uniformBlock.uniformsDirty = false;

            if (bufferModified)
            {
                anyNewBufferAllocated = true;
            }
        }
    }

    if (anyNewBufferAllocated)
    {
        // We need to reinitialize the descriptor sets if we newly allocated buffers since we can't
        // modify the descriptor sets once initialized.
        ANGLE_TRY(allocateDescriptorSet(contextVk, kUniformsDescriptorSetIndex));
        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }

    return vk::NoError();
}

vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    vk::ShaderMap<VkDescriptorBufferInfo> descriptorBufferInfo;
    vk::ShaderMap<VkWriteDescriptorSet> writeDescriptorInfo;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        auto &uniformBlock = mDefaultUniformBlocks[shaderType];
        auto &bufferInfo = descriptorBufferInfo[shaderType];
        auto &writeInfo = writeDescriptorInfo[shaderType];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.getCurrentBufferHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range = VK_WHOLE_SIZE;

        writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext = nullptr;
        writeInfo.dstSet = mDescriptorSets[0];
        writeInfo.dstBinding = static_cast<uint32_t>(shaderType);
        writeInfo.dstArrayElement = 0;
        writeInfo.descriptorCount = 1;
        writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        writeInfo.pImageInfo = nullptr;
        writeInfo.pBufferInfo = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, 2, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}

const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

const uint32_t *ProgramVk::getDynamicOffsets()
{
    // If we have no descriptor set being used, we do not need to specify any offsets when binding
    // the descriptor sets.
    if (!mUsedDescriptorSetRange.contains(0))
        return nullptr;

    return mUniformBlocksOffsets.data();
}

uint32_t ProgramVk::getDynamicOffsetsCount()
{
    if (!mUsedDescriptorSetRange.contains(0))
        return 0;

    return static_cast<uint32_t>(mUniformBlocksOffsets.size());
}

const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}

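// Rebuilds the texture descriptor set when the bound textures have changed: each sampler binding
// is resolved to a complete texture (falling back to the renderer's incomplete texture) and
// written as a combined image sampler descriptor.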
gl::Error ProgramVk::updateTexturesDescriptorSet(const gl::Context *context)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return gl::NoError();
    }

    ContextVk *contextVk = GetImplAs<ContextVk>(context);
    ANGLE_TRY(allocateDescriptorSet(contextVk, kTextureDescriptorSetIndex));

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[kTextureDescriptorSetIndex];

    // TODO(jmadill): Don't hard-code the texture limit.
    ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t writeCount = 0;

    const gl::State &glState = contextVk->getGLState();
    const auto &completeTextures = glState.getCompleteTextureCache();

    for (uint32_t textureIndex = 0; textureIndex < mState.getSamplerBindings().size();
         ++textureIndex)
    {
        const gl::SamplerBinding &samplerBinding = mState.getSamplerBindings()[textureIndex];

        ASSERT(!samplerBinding.unreferenced);

        for (uint32_t arrayElement = 0; arrayElement < samplerBinding.boundTextureUnits.size();
             ++arrayElement)
        {
            GLuint textureUnit = samplerBinding.boundTextureUnits[arrayElement];
            gl::Texture *texture = completeTextures[textureUnit];

            if (texture == nullptr)
            {
                // If we have an incomplete texture, fetch it from our renderer.
                ANGLE_TRY(
                    contextVk->getIncompleteTexture(context, samplerBinding.textureType, &texture));
            }

            TextureVk *textureVk = vk::GetImpl(texture);
            const vk::ImageHelper &image = textureVk->getImage();

            VkDescriptorImageInfo &imageInfo = descriptorImageInfo[writeCount];

            imageInfo.sampler = textureVk->getSampler().getHandle();
            imageInfo.imageView = textureVk->getImageView().getHandle();
            imageInfo.imageLayout = image.getCurrentLayout();

            VkWriteDescriptorSet &writeInfo = writeDescriptorInfo[writeCount];

            writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            writeInfo.pNext = nullptr;
            writeInfo.dstSet = descriptorSet;
            writeInfo.dstBinding = textureIndex;
            writeInfo.dstArrayElement = arrayElement;
            writeInfo.descriptorCount = 1;
            writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            writeInfo.pImageInfo = &imageInfo;
            writeInfo.pBufferInfo = nullptr;
            writeInfo.pTexelBufferView = nullptr;

            writeCount++;
        }
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(writeCount > 0);
    vkUpdateDescriptorSets(device, writeCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
    return gl::NoError();
}

void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}

const vk::PipelineLayout &ProgramVk::getPipelineLayout() const
{
    return mPipelineLayout.get();
}

void ProgramVk::setDefaultUniformBlocksMinSizeForTesting(size_t minSize)
{
    for (DefaultUniformBlock &block : mDefaultUniformBlocks)
    {
        block.storage.setMinimumSizeForTesting(minSize);
    }
}
}  // namespace rx