//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

constexpr size_t kUniformBlockDynamicBufferMinSize = 256 * 128;

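// Computes the std140 layout of a shader's default-block uniforms and the total block size.
// A size of zero is reported when the shader has no default-block uniforms, or when they all
// pack to zero bytes.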
gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  gl::Shader *shader,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *blockSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    *blockSizeOut = blockSize;
    return gl::NoError();
}

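// Copies |count| array elements of a uniform, starting at |arrayIndex|, from client memory
// into the CPU-side shadow copy of the default uniform block, honoring the std140 array
// stride between elements.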
template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               uint32_t arrayIndex,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    const int elementSize = sizeof(T) * componentCount;

    uint8_t *dst = uniformData->data() + layoutInfo.offset;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint32_t arrayOffset = arrayIndex * layoutInfo.arrayStride;
        uint8_t *writePtr    = dst + arrayOffset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        int maxIndex = arrayIndex + count;
        for (int writeIndex = arrayIndex, readIndex = 0; writeIndex < maxIndex;
             writeIndex++, readIndex++)
        {
            const int arrayOffset = writeIndex * layoutInfo.arrayStride;
            uint8_t *writePtr     = dst + arrayOffset;
            const T *readPtr      = v + readIndex;
            memcpy(writePtr, readPtr, elementSize);
        }
    }
}

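// Reads a single uniform array element back out of the CPU-side shadow copy; used by the
// glGetUniform* entry points below.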
template <typename T>
void ReadFromDefaultUniformBlock(int componentCount,
                                 uint32_t arrayIndex,
                                 T *dst,
                                 const sh::BlockMemberInfo &layoutInfo,
                                 const angle::MemoryBuffer *uniformData)
{
    ASSERT(layoutInfo.offset != -1);

    const int elementSize = sizeof(T) * componentCount;
    const uint8_t *source = uniformData->data() + layoutInfo.offset;

    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        const uint8_t *readPtr = source + arrayIndex * layoutInfo.arrayStride;
        memcpy(dst, readPtr, elementSize);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        const int arrayOffset  = arrayIndex * layoutInfo.arrayStride;
        const uint8_t *readPtr = source + arrayOffset;
        memcpy(dst, readPtr, elementSize);
    }
}

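// Allocates space for the block in the dynamic buffer, copies the shadow data into it and
// flushes the memory. |outBufferModified| reports whether a new VkBuffer had to be allocated,
// in which case the caller rewrites descriptor set 0.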
vk::Error SyncDefaultUniformBlock(RendererVk *renderer,
                                  vk::DynamicBuffer *dynamicBuffer,
                                  const angle::MemoryBuffer &bufferData,
                                  uint32_t *outOffset,
                                  bool *outBufferModified)
{
    ASSERT(!bufferData.empty());
    uint8_t *data       = nullptr;
    VkBuffer *outBuffer = nullptr;
    uint32_t offset;
    ANGLE_TRY(dynamicBuffer->allocate(renderer, bufferData.size(), &data, outBuffer, &offset,
                                      outBufferModified));
    *outOffset = offset;
    memcpy(data, bufferData.data(), bufferData.size());
    ANGLE_TRY(dynamicBuffer->flush(renderer->getDevice()));
    return vk::NoError();
}
}  // anonymous namespace

ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
              kUniformBlockDynamicBufferMinSize),
      uniformData(),
      uniformsDirty(false),
      uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state),
      mDefaultUniformBlocks(),
      mUniformBlocksOffsets(),
      mUsedDescriptorSetRange(),
      mDirtyTextures(true)
{
    mUniformBlocksOffsets.fill(0);
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk()
{
}

gl::Error ProgramVk::destroy(const gl::Context *contextImpl)
{
    ContextVk *contextVk = vk::GetImpl(contextImpl);
    return reset(contextVk);
}

vk::Error ProgramVk::reset(ContextVk *contextVk)
{
    // TODO(jmadill): Handle re-linking a program that is in-use. http://anglebug.com/2397

    VkDevice device = contextVk->getDevice();

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.destroy(device);
    }

    mEmptyUniformBlockStorage.memory.destroy(device);
    mEmptyUniformBlockStorage.buffer.destroy(device);

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    mVertexModuleSerial   = Serial();
    mFragmentModuleSerial = Serial();

    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;

    return vk::NoError();
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::ProgramLinkedResources &resources,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk           = vk::GetImpl(glContext);
    RendererVk *renderer           = contextVk->getRenderer();
    GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
    VkDevice device                = renderer->getDevice();

    ANGLE_TRY(reset(contextVk));

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(
        glslangWrapper->linkProgram(glContext, mState, resources, &vertexCode, &fragmentCode),
        linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    {
        VkShaderModuleCreateInfo vertexShaderInfo;
        vertexShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        vertexShaderInfo.pNext    = nullptr;
        vertexShaderInfo.flags    = 0;
        vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
        vertexShaderInfo.pCode    = vertexCode.data();

        ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
        mVertexModuleSerial = renderer->issueProgramSerial();
    }

    {
        VkShaderModuleCreateInfo fragmentShaderInfo;
        fragmentShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        fragmentShaderInfo.pNext    = nullptr;
        fragmentShaderInfo.flags    = 0;
        fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
        fragmentShaderInfo.pCode    = fragmentCode.data();

        ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
        mFragmentModuleSerial = renderer->issueProgramSerial();
    }

    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(1);
        mDirtyTextures = true;
    }

    return true;
}

gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device      = contextVk->getDevice();

    // Process vertex and fragment uniforms into std140 packing.
    vk::ShaderMap<sh::BlockLayoutMap> layoutMap;
    vk::ShaderMap<size_t> requiredBufferSize;
    requiredBufferSize.fill(0);

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        gl::ShaderType glShaderType = static_cast<gl::ShaderType>(shaderType);
        ANGLE_TRY(InitDefaultUniformBlock(glContext, mState.getAttachedShader(glShaderType),
                                          &layoutMap[shaderType], &requiredBufferSize[shaderType]));
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms  = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        vk::ShaderMap<sh::BlockMemberInfo> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                // Gets the uniform name without the [0] at the end.
                uniformName = gl::ParseResourceName(uniformName, nullptr);
            }

            bool found = false;

            for (vk::ShaderType shaderType : vk::AllShaderTypes())
            {
                auto it = layoutMap[shaderType].find(uniformName);
                if (it != layoutMap[shaderType].end())
                {
                    found                  = true;
                    layoutInfo[shaderType] = it->second;
                }
            }

            ASSERT(found);
        }

        for (vk::ShaderType shaderType : vk::AllShaderTypes())
        {
            mDefaultUniformBlocks[shaderType].uniformLayout.push_back(layoutInfo[shaderType]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        if (requiredBufferSize[shaderType] > 0)
        {
            if (!mDefaultUniformBlocks[shaderType].uniformData.resize(
                    requiredBufferSize[shaderType]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            size_t minAlignment = static_cast<size_t>(
                renderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);

            mDefaultUniformBlocks[shaderType].storage.init(minAlignment);

            // Initialize uniform buffer memory to zero by default.
            mDefaultUniformBlocks[shaderType].uniformData.fill(0);
            mDefaultUniformBlocks[shaderType].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext                 = nullptr;
            uniformBufferInfo.flags                 = 0;
            uniformBufferInfo.size                  = 1;
            uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices   = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            size_t requiredSize = 0;
            ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory, &requiredSize));
        }

        // Ensure the descriptor set range includes the uniform buffers at position 0.
        mUsedDescriptorSetRange.extend(0);
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    UNIMPLEMENTED();
    return GLboolean();
}

template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        // We could potentially cache some indexing here. For now this is a no-op since the mapping
        // is handled entirely in ContextVk.
        return;
    }

    if (linkedUniform.typeInfo->type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;
            UpdateDefaultUniformBlock(count, locationInfo.arrayIndex, componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);
            uniformBlock.uniformsDirty = true;
        }
    }
    else
    {
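        // The entry point type does not match the uniform type: the uniform is a boolean
        // (bool/bvec) set through a float or int entry point. Convert each component to
        // GL_TRUE/GL_FALSE as it is copied, respecting the array stride between elements.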
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;

            ASSERT(linkedUniform.typeInfo->type == gl::VariableBoolVectorType(entryPointType));

            GLint initialArrayOffset =
                locationInfo.arrayIndex * layoutInfo.arrayStride + layoutInfo.offset;
            for (GLint i = 0; i < count; i++)
            {
                GLint elementOffset = i * layoutInfo.arrayStride + initialArrayOffset;
                GLint *dest =
                    reinterpret_cast<GLint *>(uniformBlock.uniformData.data() + elementOffset);
                const T *source = v + i * componentCount;

                for (int c = 0; c < componentCount; c++)
                {
                    dest[c] = (source[c] == static_cast<T>(0)) ? GL_FALSE : GL_TRUE;
                }
            }
            uniformBlock.uniformsDirty = true;
        }
    }
}

template <typename T>
void ProgramVk::getUniformImpl(GLint location, T *v, GLenum entryPointType) const
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        UNIMPLEMENTED();
        return;
    }

    const gl::ShaderType shaderType = linkedUniform.getFirstShaderTypeWhereActive();
    ASSERT(shaderType != gl::ShaderType::InvalidEnum);

    const DefaultUniformBlock &uniformBlock =
        mDefaultUniformBlocks[static_cast<vk::ShaderType>(shaderType)];
    const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

    ASSERT(linkedUniform.typeInfo->componentType == entryPointType ||
           linkedUniform.typeInfo->componentType == gl::VariableBoolVectorType(entryPointType));
    ReadFromDefaultUniformBlock(linkedUniform.typeInfo->componentCount, locationInfo.arrayIndex, v,
                                layoutInfo, &uniformBlock.uniformData);
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT);
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC2);
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC3);
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC4);
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }

    setUniformImpl(location, count, value, GL_FLOAT_MAT2);
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }
    setUniformImpl(location, count, value, GL_FLOAT_MAT3);
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }

    setUniformImpl(location, count, value, GL_FLOAT_MAT4);
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

Serial ProgramVk::getVertexModuleSerial() const
{
    return mVertexModuleSerial;
}

const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

Serial ProgramVk::getFragmentModuleSerial() const
{
    return mFragmentModuleSerial;
}

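// Grows the descriptor set array if needed and allocates a fresh set for the given index from
// the context's dynamic descriptor pool.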
vk::Error ProgramVk::allocateDescriptorSet(ContextVk *contextVk, uint32_t descriptorSetIndex)
{
    RendererVk *renderer = contextVk->getRenderer();

    // Write out to a new descriptor set.
    vk::DynamicDescriptorPool *dynamicDescriptorPool = contextVk->getDynamicDescriptorPool();
    const auto &descriptorSetLayouts = renderer->getGraphicsDescriptorSetLayouts();

    uint32_t potentialNewCount = descriptorSetIndex + 1;
    if (potentialNewCount > mDescriptorSets.size())
    {
        mDescriptorSets.resize(potentialNewCount, VK_NULL_HANDLE);
    }

    const VkDescriptorSetLayout *descriptorSetLayout =
        descriptorSetLayouts[descriptorSetIndex].ptr();

    ANGLE_TRY(dynamicDescriptorPool->allocateDescriptorSets(contextVk, descriptorSetLayout, 1,
                                                            &mDescriptorSets[descriptorSetIndex]));
    return vk::NoError();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    getUniformImpl(location, params, GL_FLOAT);
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    getUniformImpl(location, params, GL_INT);
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

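// Called at draw time: uploads any dirty default uniform data into the per-stage dynamic
// buffers. If a new buffer had to be allocated, descriptor set 0 is re-allocated and rewritten,
// since the descriptor sets are not modified once initialized.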
vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[vk::ShaderType::VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[vk::ShaderType::FragmentShader].uniformsDirty)
    {
        return vk::NoError();
    }

    ASSERT(mUsedDescriptorSetRange.contains(0));

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    bool anyNewBufferAllocated = false;
    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        DefaultUniformBlock &uniformBlock = mDefaultUniformBlocks[shaderType];

        if (uniformBlock.uniformsDirty)
        {
            bool bufferModified = false;
            ANGLE_TRY(SyncDefaultUniformBlock(contextVk->getRenderer(), &uniformBlock.storage,
                                              uniformBlock.uniformData,
                                              &mUniformBlocksOffsets[shaderType], &bufferModified));
            uniformBlock.uniformsDirty = false;

            if (bufferModified)
            {
                anyNewBufferAllocated = true;
            }
        }
    }

    if (anyNewBufferAllocated)
    {
        // We need to reinitialize the descriptor sets if we newly allocated buffers since we can't
        // modify the descriptor sets once initialized.
        ANGLE_TRY(allocateDescriptorSet(contextVk, vk::UniformBufferIndex));
        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }

    return vk::NoError();
}

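// Writes one dynamic uniform buffer per shader stage into descriptor set 0. Stages that have
// no default uniform data point at the shared "empty" buffer instead.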
vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    vk::ShaderMap<VkDescriptorBufferInfo> descriptorBufferInfo;
    vk::ShaderMap<VkWriteDescriptorSet> writeDescriptorInfo;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        auto &uniformBlock = mDefaultUniformBlocks[shaderType];
        auto &bufferInfo   = descriptorBufferInfo[shaderType];
        auto &writeInfo    = writeDescriptorInfo[shaderType];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.getCurrentBufferHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range  = VK_WHOLE_SIZE;

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = mDescriptorSets[0];
        writeInfo.dstBinding       = static_cast<uint32_t>(shaderType);
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        writeInfo.pImageInfo       = nullptr;
        writeInfo.pBufferInfo      = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, 2, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}

const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

const uint32_t *ProgramVk::getDynamicOffsets()
{
    // If we have no descriptor set being used, we do not need to specify any offsets when binding
    // the descriptor sets.
    if (!mUsedDescriptorSetRange.contains(0))
        return nullptr;

    return mUniformBlocksOffsets.data();
}

uint32_t ProgramVk::getDynamicOffsetsCount()
{
    if (!mUsedDescriptorSetRange.contains(0))
        return 0;

    return static_cast<uint32_t>(mUniformBlocksOffsets.size());
}

const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}

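// Rewrites descriptor set 1 with a combined image sampler for every active sampler binding,
// using the complete textures currently cached in the GL state.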
vk::Error ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return vk::NoError();
    }

    ANGLE_TRY(allocateDescriptorSet(contextVk, vk::TextureIndex));

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[1];

    // TODO(jmadill): Don't hard-code the texture limit.
    ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t imageCount = 0;

    const gl::State &glState     = contextVk->getGLState();
    const auto &completeTextures = glState.getCompleteTextureCache();

    for (const gl::SamplerBinding &samplerBinding : mState.getSamplerBindings())
    {
        ASSERT(!samplerBinding.unreferenced);

        // TODO(jmadill): Sampler arrays
        ASSERT(samplerBinding.boundTextureUnits.size() == 1);

        GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
        const gl::Texture *texture = completeTextures[textureUnit];

        // TODO(jmadill): Incomplete textures handling.
        ASSERT(texture);

        TextureVk *textureVk         = vk::GetImpl(texture);
        const vk::ImageHelper &image = textureVk->getImage();

        VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];

        imageInfo.sampler     = textureVk->getSampler().getHandle();
        imageInfo.imageView   = textureVk->getImageView().getHandle();
        imageInfo.imageLayout = image.getCurrentLayout();

        auto &writeInfo = writeDescriptorInfo[imageCount];

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = descriptorSet;
        writeInfo.dstBinding       = imageCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        writeInfo.pImageInfo       = &imageInfo;
        writeInfo.pBufferInfo      = nullptr;
        writeInfo.pTexelBufferView = nullptr;

        imageCount++;
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(imageCount > 0);
    vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
    return vk::NoError();
}

void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}

void ProgramVk::setDefaultUniformBlocksMinSizeForTesting(size_t minSize)
{
    for (DefaultUniformBlock &block : mDefaultUniformBlocks)
    {
        block.storage.setMinimumSizeForTesting(minSize);
    }
}
}  // namespace rx