//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/renderer_utils.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

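// Minimum block size used to construct the vk::DynamicBuffer that backs each shader stage's
// default uniform block (see the DefaultUniformBlock constructor below).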
constexpr size_t kUniformBlockDynamicBufferMinSize = 256 * 128;

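// Computes the std140 layout of a shader's default uniform block and returns the resulting block
// size. A size of zero means the shader has no default uniforms that need backing storage.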
gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  gl::Shader *shader,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *blockSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    *blockSizeOut = blockSize;
    return gl::NoError();
}

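// Copies 'count' uniform elements from the client pointer 'v' into the CPU-side shadow buffer of
// a default uniform block, honoring the std140 array stride when it differs from the tightly
// packed element size.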
template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               uint32_t arrayIndex,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    const int elementSize = sizeof(T) * componentCount;

    uint8_t *dst = uniformData->data() + layoutInfo.offset;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint32_t arrayOffset = arrayIndex * layoutInfo.arrayStride;
        uint8_t *writePtr = dst + arrayOffset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        int maxIndex = arrayIndex + count;
        for (int writeIndex = arrayIndex, readIndex = 0; writeIndex < maxIndex;
             writeIndex++, readIndex++)
        {
            const int arrayOffset = writeIndex * layoutInfo.arrayStride;
            uint8_t *writePtr = dst + arrayOffset;
            const T *readPtr = v + (readIndex * componentCount);
            memcpy(writePtr, readPtr, elementSize);
        }
    }
}

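// Reads a single uniform element at 'arrayIndex' back out of the CPU-side shadow buffer; the
// inverse of UpdateDefaultUniformBlock above.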
template <typename T>
void ReadFromDefaultUniformBlock(int componentCount,
                                 uint32_t arrayIndex,
                                 T *dst,
                                 const sh::BlockMemberInfo &layoutInfo,
                                 const angle::MemoryBuffer *uniformData)
{
    ASSERT(layoutInfo.offset != -1);

    const int elementSize = sizeof(T) * componentCount;
    const uint8_t *source = uniformData->data() + layoutInfo.offset;

    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        const uint8_t *readPtr = source + arrayIndex * layoutInfo.arrayStride;
        memcpy(dst, readPtr, elementSize);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        const int arrayOffset = arrayIndex * layoutInfo.arrayStride;
        const uint8_t *readPtr = source + arrayOffset;
        memcpy(dst, readPtr, elementSize);
    }
}

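// Streams the shadow buffer contents into the dynamic uniform buffer and flushes it, returning
// the dynamic offset at which the data was written and whether a new buffer had to be allocated.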
vk::Error SyncDefaultUniformBlock(RendererVk *renderer,
                                  vk::DynamicBuffer *dynamicBuffer,
                                  const angle::MemoryBuffer &bufferData,
                                  uint32_t *outOffset,
                                  bool *outBufferModified)
{
    ASSERT(!bufferData.empty());
    uint8_t *data = nullptr;
    VkBuffer *outBuffer = nullptr;
    uint32_t offset;
    ANGLE_TRY(dynamicBuffer->allocate(renderer, bufferData.size(), &data, outBuffer, &offset,
                                      outBufferModified));
    *outOffset = offset;
    memcpy(data, bufferData.data(), bufferData.size());
    ANGLE_TRY(dynamicBuffer->flush(renderer->getDevice()));
    return vk::NoError();
}
}  // anonymous namespace

ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
              kUniformBlockDynamicBufferMinSize),
      uniformData(),
      uniformsDirty(false),
      uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state),
      mDefaultUniformBlocks(),
      mUniformBlocksOffsets(),
      mUsedDescriptorSetRange(),
      mDirtyTextures(true)
{
    mUniformBlocksOffsets.fill(0);
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk()
{
}

gl::Error ProgramVk::destroy(const gl::Context *contextImpl)
{
    ContextVk *contextVk = vk::GetImpl(contextImpl);
    return reset(contextVk);
}

vk::Error ProgramVk::reset(ContextVk *contextVk)
{
    // TODO(jmadill): Handle re-linking a program that is in-use. http://anglebug.com/2397

    VkDevice device = contextVk->getDevice();

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.destroy(device);
    }

    mEmptyUniformBlockStorage.memory.destroy(device);
    mEmptyUniformBlockStorage.buffer.destroy(device);

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    mVertexModuleSerial = Serial();
    mFragmentModuleSerial = Serial();

    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;

    return vk::NoError();
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

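// link: translates the program to SPIR-V with GlslangWrapper, creates the vertex and fragment
// vk::ShaderModules, and sets up the storage for the default uniform blocks.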
gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::ProgramLinkedResources &resources,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
    VkDevice device = renderer->getDevice();

    ANGLE_TRY(reset(contextVk));

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(
        glslangWrapper->linkProgram(glContext, mState, resources, &vertexCode, &fragmentCode),
        linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    {
        VkShaderModuleCreateInfo vertexShaderInfo;
        vertexShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        vertexShaderInfo.pNext = nullptr;
        vertexShaderInfo.flags = 0;
        vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
        vertexShaderInfo.pCode = vertexCode.data();

        ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
        mVertexModuleSerial = renderer->issueShaderSerial();
    }

    {
        VkShaderModuleCreateInfo fragmentShaderInfo;
        fragmentShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        fragmentShaderInfo.pNext = nullptr;
        fragmentShaderInfo.flags = 0;
        fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
        fragmentShaderInfo.pCode = fragmentCode.data();

        ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
        mFragmentModuleSerial = renderer->issueShaderSerial();
    }

    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(1);
        mDirtyTextures = true;
    }

    return true;
}

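// Builds the std140 layout tables for the vertex and fragment default uniform blocks, sizes the
// CPU shadow buffers and their dynamic GPU storage, and creates a tiny placeholder buffer for a
// stage that has no default uniforms.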
gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device = contextVk->getDevice();

    // Process vertex and fragment uniforms into std140 packing.
    vk::ShaderMap<sh::BlockLayoutMap> layoutMap;
    vk::ShaderMap<size_t> requiredBufferSize;
    requiredBufferSize.fill(0);

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        gl::ShaderType glShaderType = static_cast<gl::ShaderType>(shaderType);
        ANGLE_TRY(InitDefaultUniformBlock(glContext, mState.getAttachedShader(glShaderType),
                                          &layoutMap[shaderType], &requiredBufferSize[shaderType]));
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        vk::ShaderMap<sh::BlockMemberInfo> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                // Gets the uniform name without the [0] at the end.
                uniformName = gl::ParseResourceName(uniformName, nullptr);
            }

            bool found = false;

            for (vk::ShaderType shaderType : vk::AllShaderTypes())
            {
                auto it = layoutMap[shaderType].find(uniformName);
                if (it != layoutMap[shaderType].end())
                {
                    found = true;
                    layoutInfo[shaderType] = it->second;
                }
            }

            ASSERT(found);
        }

        for (vk::ShaderType shaderType : vk::AllShaderTypes())
        {
            mDefaultUniformBlocks[shaderType].uniformLayout.push_back(layoutInfo[shaderType]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        if (requiredBufferSize[shaderType] > 0)
        {
            if (!mDefaultUniformBlocks[shaderType].uniformData.resize(
                    requiredBufferSize[shaderType]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            size_t minAlignment = static_cast<size_t>(
                renderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);

            mDefaultUniformBlocks[shaderType].storage.init(minAlignment, renderer);

            // Initialize uniform buffer memory to zero by default.
            mDefaultUniformBlocks[shaderType].uniformData.fill(0);
            mDefaultUniformBlocks[shaderType].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext = nullptr;
            uniformBufferInfo.flags = 0;
            uniformBufferInfo.size = 1;
            uniformBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            size_t requiredSize = 0;
            ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory, &requiredSize));
        }

        // Ensure the descriptor set range includes the uniform buffers at position 0.
        mUsedDescriptorSetRange.extend(0);
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    // No-op. The spec is very vague about the behavior of validation.
    return GL_TRUE;
}

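// Shared implementation of the glUniform* entry points: writes the new values into the shadow
// buffer of every stage that uses this location, converting to GL booleans when the uniform is a
// boolean vector, and marks the touched blocks dirty.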
template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        // We could potentially cache some indexing here. For now this is a no-op since the mapping
        // is handled entirely in ContextVk.
        return;
    }

    if (linkedUniform.typeInfo->type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;
            UpdateDefaultUniformBlock(count, locationInfo.arrayIndex, componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);
            uniformBlock.uniformsDirty = true;
        }
    }
    else
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;

            ASSERT(linkedUniform.typeInfo->type == gl::VariableBoolVectorType(entryPointType));

            GLint initialArrayOffset =
                locationInfo.arrayIndex * layoutInfo.arrayStride + layoutInfo.offset;
            for (GLint i = 0; i < count; i++)
            {
                GLint elementOffset = i * layoutInfo.arrayStride + initialArrayOffset;
                GLint *dest =
                    reinterpret_cast<GLint *>(uniformBlock.uniformData.data() + elementOffset);
                const T *source = v + i * componentCount;

                for (int c = 0; c < componentCount; c++)
                {
                    dest[c] = (source[c] == static_cast<T>(0)) ? GL_FALSE : GL_TRUE;
                }
            }
            uniformBlock.uniformsDirty = true;
        }
    }
}

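// Shared implementation of the glGetUniform* queries: reads the value back from the shadow
// buffer of the first shader stage in which the uniform is active.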
template <typename T>
void ProgramVk::getUniformImpl(GLint location, T *v, GLenum entryPointType) const
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        UNIMPLEMENTED();
        return;
    }

    const gl::ShaderType shaderType = linkedUniform.getFirstShaderTypeWhereActive();
    ASSERT(shaderType != gl::ShaderType::InvalidEnum);

    const DefaultUniformBlock &uniformBlock =
        mDefaultUniformBlocks[static_cast<vk::ShaderType>(shaderType)];
    const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

    ASSERT(linkedUniform.typeInfo->componentType == entryPointType ||
           linkedUniform.typeInfo->componentType == gl::VariableBoolVectorType(entryPointType));

    if (gl::IsMatrixType(linkedUniform.type))
    {
        const uint8_t *ptrToElement = uniformBlock.uniformData.data() + layoutInfo.offset +
                                      (locationInfo.arrayIndex * linkedUniform.getElementSize());
        GetMatrixUniform(linkedUniform.type, v, reinterpret_cast<const T *>(ptrToElement), false);
    }
    else
    {
        ReadFromDefaultUniformBlock(linkedUniform.typeInfo->componentCount, locationInfo.arrayIndex,
                                    v, layoutInfo, &uniformBlock.uniformData);
    }
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT);
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC2);
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC3);
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC4);
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

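// Shared implementation of the glUniformMatrix* entry points. SetFloatUniformMatrix performs the
// optional transpose and reports whether any data actually changed.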
template <int cols, int rows>
void ProgramVk::setUniformMatrixfv(GLint location,
                                   GLsizei count,
                                   GLboolean transpose,
                                   const GLfloat *value)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

        // Assume an offset of -1 means the block is unused.
        if (layoutInfo.offset == -1)
        {
            continue;
        }

        bool updated = SetFloatUniformMatrix<cols, rows>(
            locationInfo.arrayIndex, linkedUniform.getArraySizeProduct(), count, transpose, value,
            uniformBlock.uniformData.data() + layoutInfo.offset);

        // If the uniformsDirty flag was already set, don't clear it here just because this
        // setter did not change any data; the uniform still needs to be included when the
        // descriptor sets are updated.
        uniformBlock.uniformsDirty = uniformBlock.uniformsDirty || updated;
    }
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<2, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<3, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<4, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

Serial ProgramVk::getVertexModuleSerial() const
{
    return mVertexModuleSerial;
}

const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

Serial ProgramVk::getFragmentModuleSerial() const
{
    return mFragmentModuleSerial;
}

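// Grows mDescriptorSets if needed and allocates a fresh descriptor set for the given index from
// the context's dynamic descriptor pool, using the renderer's matching graphics set layout.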
vk::Error ProgramVk::allocateDescriptorSet(ContextVk *contextVk, uint32_t descriptorSetIndex)
{
    RendererVk *renderer = contextVk->getRenderer();

    // Write out to a new descriptor set.
    vk::DynamicDescriptorPool *dynamicDescriptorPool = contextVk->getDynamicDescriptorPool();

    uint32_t potentialNewCount = descriptorSetIndex + 1;
    if (potentialNewCount > mDescriptorSets.size())
    {
        mDescriptorSets.resize(potentialNewCount, VK_NULL_HANDLE);
    }

    const vk::DescriptorSetLayout &descriptorSetLayout =
        renderer->getGraphicsDescriptorSetLayout(descriptorSetIndex);

    ANGLE_TRY(dynamicDescriptorPool->allocateDescriptorSets(contextVk, descriptorSetLayout.ptr(), 1,
                                                            &mDescriptorSets[descriptorSetIndex]));
    return vk::NoError();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    getUniformImpl(location, params, GL_FLOAT);
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    getUniformImpl(location, params, GL_INT);
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

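// Flushes dirty default uniform data into the per-stage dynamic buffers. If that allocation
// produced a new buffer, the uniforms descriptor set (index 0) is re-allocated and rewritten,
// since a descriptor set is not modified once initialized.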
vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[vk::ShaderType::VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[vk::ShaderType::FragmentShader].uniformsDirty)
    {
        return vk::NoError();
    }

    ASSERT(mUsedDescriptorSetRange.contains(0));

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    bool anyNewBufferAllocated = false;
    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        DefaultUniformBlock &uniformBlock = mDefaultUniformBlocks[shaderType];

        if (uniformBlock.uniformsDirty)
        {
            bool bufferModified = false;
            ANGLE_TRY(SyncDefaultUniformBlock(contextVk->getRenderer(), &uniformBlock.storage,
                                              uniformBlock.uniformData,
                                              &mUniformBlocksOffsets[shaderType], &bufferModified));
            uniformBlock.uniformsDirty = false;

            if (bufferModified)
            {
                anyNewBufferAllocated = true;
            }
        }
    }

    if (anyNewBufferAllocated)
    {
        // We need to reinitialize the descriptor sets if we newly allocated buffers since we can't
        // modify the descriptor sets once initialized.
        ANGLE_TRY(allocateDescriptorSet(contextVk, kUniformsDescriptorSetIndex));
        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }

    return vk::NoError();
}

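// Points the two dynamic uniform-buffer descriptors (one per shader stage, bound at the stage
// index) at the current backing buffers, falling back to the shared empty buffer for a stage
// with no default uniforms.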
vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    vk::ShaderMap<VkDescriptorBufferInfo> descriptorBufferInfo;
    vk::ShaderMap<VkWriteDescriptorSet> writeDescriptorInfo;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        auto &uniformBlock = mDefaultUniformBlocks[shaderType];
        auto &bufferInfo = descriptorBufferInfo[shaderType];
        auto &writeInfo = writeDescriptorInfo[shaderType];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.getCurrentBufferHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range = VK_WHOLE_SIZE;

        writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext = nullptr;
        writeInfo.dstSet = mDescriptorSets[0];
        writeInfo.dstBinding = static_cast<uint32_t>(shaderType);
        writeInfo.dstArrayElement = 0;
        writeInfo.descriptorCount = 1;
        writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        writeInfo.pImageInfo = nullptr;
        writeInfo.pBufferInfo = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, 2, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}

const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

const uint32_t *ProgramVk::getDynamicOffsets()
{
    // If we have no descriptor set being used, we do not need to specify any offsets when binding
    // the descriptor sets.
    if (!mUsedDescriptorSetRange.contains(0))
        return nullptr;

    return mUniformBlocksOffsets.data();
}

uint32_t ProgramVk::getDynamicOffsetsCount()
{
    if (!mUsedDescriptorSetRange.contains(0))
        return 0;

    return static_cast<uint32_t>(mUniformBlocksOffsets.size());
}

const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}

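// Rewrites the texture descriptor set (index 1) from the currently bound complete textures,
// substituting the renderer's incomplete texture where a binding is incomplete. Only runs when
// the program samples textures and the set has been invalidated.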
gl::Error ProgramVk::updateTexturesDescriptorSet(const gl::Context *context)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return gl::NoError();
    }

    ContextVk *contextVk = GetImplAs<ContextVk>(context);
    ANGLE_TRY(allocateDescriptorSet(contextVk, kTextureDescriptorSetIndex));

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[kTextureDescriptorSetIndex];

    // TODO(jmadill): Don't hard-code the texture limit.
    ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t imageCount = 0;

    const gl::State &glState = contextVk->getGLState();
    const auto &completeTextures = glState.getCompleteTextureCache();

    for (const gl::SamplerBinding &samplerBinding : mState.getSamplerBindings())
    {
        ASSERT(!samplerBinding.unreferenced);

        // TODO(jmadill): Sampler arrays
        ASSERT(samplerBinding.boundTextureUnits.size() == 1);

        GLuint textureUnit = samplerBinding.boundTextureUnits[0];
        gl::Texture *texture = completeTextures[textureUnit];

        if (texture == nullptr)
        {
            // If we have an incomplete texture, fetch it from our renderer.
            ANGLE_TRY(
                contextVk->getIncompleteTexture(context, samplerBinding.textureType, &texture));
        }

        TextureVk *textureVk = vk::GetImpl(texture);
        const vk::ImageHelper &image = textureVk->getImage();

        VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];

        imageInfo.sampler = textureVk->getSampler().getHandle();
        imageInfo.imageView = textureVk->getImageView().getHandle();
        imageInfo.imageLayout = image.getCurrentLayout();

        auto &writeInfo = writeDescriptorInfo[imageCount];

        writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext = nullptr;
        writeInfo.dstSet = descriptorSet;
        writeInfo.dstBinding = imageCount;
        writeInfo.dstArrayElement = 0;
        writeInfo.descriptorCount = 1;
        writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        writeInfo.pImageInfo = &imageInfo;
        writeInfo.pBufferInfo = nullptr;
        writeInfo.pTexelBufferView = nullptr;

        imageCount++;
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(imageCount > 0);
    vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
    return gl::NoError();
}

void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}

void ProgramVk::setDefaultUniformBlocksMinSizeForTesting(size_t minSize)
{
    for (DefaultUniformBlock &block : mDefaultUniformBlocks)
    {
        block.storage.setMinimumSizeForTesting(minSize);
    }
}
}  // namespace rx
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400935} // namespace rx