//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DynamicDescriptorPool.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/StreamingBuffer.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

constexpr size_t kUniformBlockStreamingBufferMinSize = 256 * 128;

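// Computes the std140 layout of one shader's default uniform block (all non-block uniforms)
// and returns the total block size in bytes. A size of zero means the stage has no default
// uniform data that needs a backing buffer.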
gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  gl::Shader *shader,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *blockSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    *blockSizeOut = blockSize;
    return gl::NoError();
}

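// The two helpers below copy uniform data between the client-provided array and the CPU-side
// shadow of the std140 block, at the offset recorded in the BlockMemberInfo. Only the
// tightly-packed case (arrayStride of 0 or equal to the element size) is handled; anything
// else currently hits UNIMPLEMENTED. For example, a non-array vec3 written via glUniform3fv
// becomes a 12-byte memcpy at the uniform's std140 offset (illustrative case only).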
template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    int elementSize = sizeof(T) * componentCount;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint8_t *writePtr = uniformData->data() + layoutInfo.offset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        UNIMPLEMENTED();
    }
}

template <typename T>
void ReadFromDefaultUniformBlock(int componentCount,
                                 T *dst,
                                 const sh::BlockMemberInfo &layoutInfo,
                                 const angle::MemoryBuffer *uniformData)
{
    ASSERT(layoutInfo.offset != -1);

    int elementSize = sizeof(T) * componentCount;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        const uint8_t *readPtr = uniformData->data() + layoutInfo.offset;
        memcpy(dst, readPtr, elementSize);
    }
    else
    {
        UNIMPLEMENTED();
    }
}

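// Streams the CPU shadow data into the block's StreamingBuffer: allocates space, copies the
// bytes, flushes, and reports the allocation offset plus whether a brand-new VkBuffer had to
// be created (in which case the descriptor set referencing it must be rewritten).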
vk::Error SyncDefaultUniformBlock(ContextVk *contextVk,
                                  StreamingBuffer &streamingBuffer,
                                  const angle::MemoryBuffer &bufferData,
                                  uint32_t *outOffset,
                                  bool *outBufferModified)
{
    ASSERT(!bufferData.empty());
    uint8_t *data = nullptr;
    VkBuffer *outBuffer = nullptr;
    uint32_t offset;
    ANGLE_TRY(streamingBuffer.allocate(contextVk, bufferData.size(), &data, outBuffer, &offset,
                                       outBufferModified));
    *outOffset = offset;
    memcpy(data, bufferData.data(), bufferData.size());
    ANGLE_TRY(streamingBuffer.flush(contextVk));
    return vk::NoError();
}

// TODO(jiawei.shao@intel.com): Fully replace this enum with gl::ShaderType. (BUG=angleproject:2169)
enum ShaderIndex : uint32_t
{
    MinShaderIndex = 0,
    VertexShader   = MinShaderIndex,
    FragmentShader = 1,
    MaxShaderIndex = kShaderTypeCount,
};

gl::Shader *GetShader(const gl::ProgramState &programState, uint32_t shaderIndex)
{
    switch (shaderIndex)
    {
        case VertexShader:
            return programState.getAttachedShader(gl::ShaderType::Vertex);
        case FragmentShader:
            return programState.getAttachedShader(gl::ShaderType::Fragment);
        default:
            UNREACHABLE();
            return nullptr;
    }
}

}  // anonymous namespace

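// Each shader stage owns a DefaultUniformBlock: a StreamingBuffer backing the GPU copy, an
// angle::MemoryBuffer shadowing the std140 data on the CPU, a dirty flag, and a layout table
// indexed by uniform location.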
ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
              kUniformBlockStreamingBufferMinSize),
      uniformData(),
      uniformsDirty(false),
      uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state),
      mDefaultUniformBlocks(),
      mUniformBlocksOffsets(),
      mUsedDescriptorSetRange(),
      mDirtyTextures(true)
{
    mUniformBlocksOffsets.fill(0);
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk()
{
}

gl::Error ProgramVk::destroy(const gl::Context *contextImpl)
{
    ContextVk *contextVk = vk::GetImpl(contextImpl);
    return reset(contextVk);
}

vk::Error ProgramVk::reset(ContextVk *contextVk)
{
    // TODO(jmadill): Handle re-linking a program that is in-use. http://anglebug.com/2397

    VkDevice device = contextVk->getDevice();

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.destroy(device);
    }

    mEmptyUniformBlockStorage.memory.destroy(device);
    mEmptyUniformBlockStorage.buffer.destroy(device);

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    mVertexModuleSerial   = Serial();
    mFragmentModuleSerial = Serial();

    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;

    return vk::NoError();
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

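// Linking converts the GLSL program to SPIR-V through GlslangWrapper, wraps the vertex and
// fragment blobs in VkShaderModules, then allocates descriptor sets and initializes the
// per-stage default uniform blocks.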
gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::ProgramLinkedResources &resources,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk           = vk::GetImpl(glContext);
    RendererVk *renderer           = contextVk->getRenderer();
    GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
    VkDevice device                = renderer->getDevice();

    ANGLE_TRY(reset(contextVk));

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(
        glslangWrapper->linkProgram(glContext, mState, resources, &vertexCode, &fragmentCode),
        linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    {
        VkShaderModuleCreateInfo vertexShaderInfo;
        vertexShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        vertexShaderInfo.pNext    = nullptr;
        vertexShaderInfo.flags    = 0;
        vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
        vertexShaderInfo.pCode    = vertexCode.data();

        ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
        mVertexModuleSerial = renderer->issueProgramSerial();
    }

    {
        VkShaderModuleCreateInfo fragmentShaderInfo;
        fragmentShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        fragmentShaderInfo.pNext    = nullptr;
        fragmentShaderInfo.flags    = 0;
        fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
        fragmentShaderInfo.pCode    = fragmentCode.data();

        ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
        mFragmentModuleSerial = renderer->issueProgramSerial();
    }

    ANGLE_TRY(allocateDescriptorSets(contextVk));
    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(1);
        mDirtyTextures = true;
    }

    return true;
}

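// Builds the std140 layout for the vertex and fragment default uniform blocks, sizes and
// zero-fills the CPU shadow buffers, and initializes each stage's streaming buffer with the
// device's minUniformBufferOffsetAlignment. When only one stage has uniform data, a one-byte
// placeholder buffer is created so the other stage's descriptor still has a valid buffer to
// point at.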
gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device      = contextVk->getDevice();

    // Process vertex and fragment uniforms into std140 packing.
    std::array<sh::BlockLayoutMap, MaxShaderIndex> layoutMap;
    std::array<size_t, MaxShaderIndex> requiredBufferSize = {{0, 0}};

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        ANGLE_TRY(InitDefaultUniformBlock(glContext, GetShader(mState, shaderIndex),
                                          &layoutMap[shaderIndex],
                                          &requiredBufferSize[shaderIndex]));
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms  = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        std::array<sh::BlockMemberInfo, MaxShaderIndex> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                uniformName += ArrayString(location.arrayIndex);
            }

            bool found = false;

            for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
            {
                auto it = layoutMap[shaderIndex].find(uniformName);
                if (it != layoutMap[shaderIndex].end())
                {
                    found = true;
                    layoutInfo[shaderIndex] = it->second;
                }
            }

            ASSERT(found);
        }

        for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
        {
            mDefaultUniformBlocks[shaderIndex].uniformLayout.push_back(layoutInfo[shaderIndex]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        if (requiredBufferSize[shaderIndex] > 0)
        {
            if (!mDefaultUniformBlocks[shaderIndex].uniformData.resize(
                    requiredBufferSize[shaderIndex]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            size_t minAlignment = static_cast<size_t>(
                renderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);

            mDefaultUniformBlocks[shaderIndex].storage.init(minAlignment);

            // Initialize uniform buffer memory to zero by default.
            mDefaultUniformBlocks[shaderIndex].uniformData.fill(0);
            mDefaultUniformBlocks[shaderIndex].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext                 = nullptr;
            uniformBufferInfo.flags                 = 0;
            uniformBufferInfo.size                  = 1;
            uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices   = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            size_t requiredSize = 0;
            ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory, &requiredSize));
        }

        // Ensure the descriptor set range includes the uniform buffers at position 0.
        mUsedDescriptorSetRange.extend(0);
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    UNIMPLEMENTED();
    return GLboolean();
}

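// Generic implementation behind the glUniform* entry points below. The new values are written
// into the CPU shadow buffer of every stage in which the uniform is active (an offset of -1
// marks an unused stage) and the block is flagged dirty; the actual GPU upload is deferred to
// updateUniforms().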
template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        UNIMPLEMENTED();
        return;
    }

    if (linkedUniform.type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            UpdateDefaultUniformBlock(count, linkedUniform.typeInfo->componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);

            uniformBlock.uniformsDirty = true;
        }
    }
    else
    {
        ASSERT(linkedUniform.type == gl::VariableBoolVectorType(entryPointType));
        UNIMPLEMENTED();
    }
}

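// Services the glGetUniform* queries by reading back from the CPU shadow buffer of the first
// stage in which the uniform is active; no GPU readback is needed.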
template <typename T>
void ProgramVk::getUniformImpl(GLint location, T *v, GLenum entryPointType) const
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        UNIMPLEMENTED();
        return;
    }

    ASSERT(linkedUniform.typeInfo->componentType == entryPointType);
    const gl::ShaderType shaderType = linkedUniform.getFirstShaderTypeWhereActive();
    ASSERT(shaderType != gl::ShaderType::InvalidEnum);

    const DefaultUniformBlock &uniformBlock =
        mDefaultUniformBlocks[static_cast<GLuint>(shaderType)];
    const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];
    ReadFromDefaultUniformBlock(linkedUniform.typeInfo->componentCount, v, layoutInfo,
                                &uniformBlock.uniformData);
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT);
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }

    setUniformImpl(location, count, value, GL_FLOAT_MAT2);
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }
    setUniformImpl(location, count, value, GL_FLOAT_MAT3);
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }

    setUniformImpl(location, count, value, GL_FLOAT_MAT4);
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

Serial ProgramVk::getVertexModuleSerial() const
{
    return mVertexModuleSerial;
}

const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

Serial ProgramVk::getFragmentModuleSerial() const
{
    return mFragmentModuleSerial;
}

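// Grabs fresh descriptor sets from the context's DynamicDescriptorPool, one per graphics
// descriptor set layout (set 0 holds the default uniform buffers, set 1 the textures).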
vk::Error ProgramVk::allocateDescriptorSets(ContextVk *contextVk)
{
    RendererVk *renderer = contextVk->getRenderer();

    // Write out to a new descriptor set.
    DynamicDescriptorPool *dynamicDescriptorPool = contextVk->getDynamicDescriptorPool();

    const auto &descriptorSetLayouts = renderer->getGraphicsDescriptorSetLayouts();
    uint32_t descriptorSetCount = static_cast<uint32_t>(descriptorSetLayouts.size());

    mDescriptorSets.resize(descriptorSetCount, VK_NULL_HANDLE);

    // TODO(lucferron): It's wasteful to reallocate the texture descriptor sets when we only
    // care about the uniforms.
    // http://anglebug.com/2421
    ANGLE_TRY(dynamicDescriptorPool->allocateDescriptorSets(
        contextVk, descriptorSetLayouts[0].ptr(), descriptorSetCount, &mDescriptorSets[0]));
    return vk::NoError();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    getUniformImpl(location, params, GL_FLOAT);
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    getUniformImpl(location, params, GL_INT);
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

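// Streams any dirty CPU shadow buffers into GPU memory and records the resulting dynamic
// offsets. If the streaming allocation produced a brand-new VkBuffer, the descriptor sets are
// reallocated and rewritten, since a descriptor set that may already be in use cannot be
// updated in place.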
vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[FragmentShader].uniformsDirty)
    {
        return vk::NoError();
    }

    ASSERT(mUsedDescriptorSetRange.contains(0));

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    bool anyNewBufferAllocated = false;
    for (size_t index = 0; index < mDefaultUniformBlocks.size(); index++)
    {
        DefaultUniformBlock &uniformBlock = mDefaultUniformBlocks[index];

        if (uniformBlock.uniformsDirty)
        {
            bool bufferModified = false;
            ANGLE_TRY(SyncDefaultUniformBlock(contextVk, uniformBlock.storage,
                                              uniformBlock.uniformData,
                                              &mUniformBlocksOffsets[index], &bufferModified));
            uniformBlock.uniformsDirty = false;

            if (bufferModified)
            {
                anyNewBufferAllocated = true;
            }
        }
    }

    if (anyNewBufferAllocated)
    {
        // We need to reinitialize the descriptor sets if we newly allocated buffers since we can't
        // modify the descriptor sets once initialized.
        ANGLE_TRY(allocateDescriptorSets(contextVk));
        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }

    return vk::NoError();
}

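// Writes one VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC descriptor per stage into set 0
// (binding 0 = vertex, binding 1 = fragment), pointing either at the stage's current streaming
// buffer or at the one-byte "empty" buffer when the stage has no default uniform data.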
vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    std::array<VkDescriptorBufferInfo, MaxShaderIndex> descriptorBufferInfo;
    std::array<VkWriteDescriptorSet, MaxShaderIndex> writeDescriptorInfo;
    uint32_t bufferCount = 0;

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        auto &bufferInfo = descriptorBufferInfo[bufferCount];
        auto &writeInfo  = writeDescriptorInfo[bufferCount];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.getCurrentBufferHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range  = VK_WHOLE_SIZE;

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = mDescriptorSets[0];
        writeInfo.dstBinding       = bufferCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        writeInfo.pImageInfo       = nullptr;
        writeInfo.pBufferInfo      = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;

        bufferCount++;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, bufferCount, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}

const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

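// The dynamic offsets below pair with the VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
// descriptors in set 0 and are presumably handed to vkCmdBindDescriptorSets as pDynamicOffsets
// by the caller; that bind call lives outside this file.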
const uint32_t *ProgramVk::getDynamicOffsets()
{
    // If we have no descriptor set being used, we do not need to specify any offsets when binding
    // the descriptor sets.
    if (!mUsedDescriptorSetRange.contains(0))
        return nullptr;

    return mUniformBlocksOffsets.data();
}

uint32_t ProgramVk::getDynamicOffsetsCount()
{
    if (!mUsedDescriptorSetRange.contains(0))
        return 0;

    return static_cast<uint32_t>(mUniformBlocksOffsets.size());
}

const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}

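// Writes a combined image/sampler descriptor into set 1 for every sampler binding, using the
// complete texture currently bound to each unit. Sampler arrays and incomplete textures are
// not handled yet (see the TODOs below).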
void ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return;
    }

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[1];

    // TODO(jmadill): Don't hard-code the texture limit.
    ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t imageCount = 0;

    const gl::State &glState = contextVk->getGLState();
    const auto &completeTextures = glState.getCompleteTextureCache();

    for (const auto &samplerBinding : mState.getSamplerBindings())
    {
        ASSERT(!samplerBinding.unreferenced);

        // TODO(jmadill): Sampler arrays
        ASSERT(samplerBinding.boundTextureUnits.size() == 1);

        GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
        const gl::Texture *texture = completeTextures[textureUnit];

        // TODO(jmadill): Incomplete textures handling.
        ASSERT(texture);

        TextureVk *textureVk   = vk::GetImpl(texture);
        const vk::Image &image = textureVk->getImage();

        VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];

        imageInfo.sampler     = textureVk->getSampler().getHandle();
        imageInfo.imageView   = textureVk->getImageView().getHandle();
        imageInfo.imageLayout = image.getCurrentLayout();

        auto &writeInfo = writeDescriptorInfo[imageCount];

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = descriptorSet;
        writeInfo.dstBinding       = imageCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        writeInfo.pImageInfo       = &imageInfo;
        writeInfo.pBufferInfo      = nullptr;
        writeInfo.pTexelBufferView = nullptr;

        imageCount++;
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(imageCount > 0);
    vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
}

void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}

void ProgramVk::setDefaultUniformBlocksMinSizeForTesting(size_t minSize)
{
    for (DefaultUniformBlock &block : mDefaultUniformBlocks)
    {
        block.storage.setMinimumSize(minSize);
    }
}
}  // namespace rx