//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DynamicDescriptorPool.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/StreamingBuffer.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

constexpr size_t kUniformBlockStreamingBufferMinSize = 256 * 128;

gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  gl::Shader *shader,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *blockSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *blockSizeOut = 0;
        return gl::NoError();
    }

    *blockSizeOut = blockSize;
    return gl::NoError();
}

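// Writes a single uniform's data into the CPU-side shadow copy of the default uniform block.
// Only the tightly packed case (array stride of zero or equal to the element size) is handled;
// strided layouts fall through to UNIMPLEMENTED().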
template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    int elementSize = sizeof(T) * componentCount;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint8_t *writePtr = uniformData->data() + layoutInfo.offset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        UNIMPLEMENTED();
    }
}

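// Reads a single uniform back out of the CPU-side shadow copy. As with the write path above, only
// tightly packed layouts are supported.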
template <typename T>
void ReadFromDefaultUniformBlock(int componentCount,
                                 T *dst,
                                 const sh::BlockMemberInfo &layoutInfo,
                                 const angle::MemoryBuffer *uniformData)
{
    ASSERT(layoutInfo.offset != -1);

    int elementSize = sizeof(T) * componentCount;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        const uint8_t *readPtr = uniformData->data() + layoutInfo.offset;
        memcpy(dst, readPtr, elementSize);
    }
    else
    {
        UNIMPLEMENTED();
    }
}

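// Streams the CPU-side uniform data into the per-program StreamingBuffer and returns the dynamic
// offset at which it was placed. *outBufferModified is set when the streaming buffer had to move
// to a new underlying VkBuffer, in which case the descriptor set must be rewritten.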
vk::Error SyncDefaultUniformBlock(ContextVk *contextVk,
                                  StreamingBuffer &streamingBuffer,
                                  const angle::MemoryBuffer &bufferData,
                                  uint32_t *outOffset,
                                  bool *outBufferModified)
{
    ASSERT(!bufferData.empty());
    uint8_t *data       = nullptr;
    VkBuffer *outBuffer = nullptr;
    uint32_t offset     = 0;
    ANGLE_TRY(streamingBuffer.allocate(contextVk, bufferData.size(), &data, outBuffer, &offset,
                                       outBufferModified));
    *outOffset = offset;
    memcpy(data, bufferData.data(), bufferData.size());
    ANGLE_TRY(streamingBuffer.flush(contextVk));
    return vk::NoError();
}

// TODO(jiawei.shao@intel.com): Fully replace this enum with gl::ShaderType.
// (BUG=angleproject:2169)
enum ShaderIndex : uint32_t
{
    MinShaderIndex = 0,
    VertexShader   = MinShaderIndex,
    FragmentShader = 1,
    MaxShaderIndex = kShaderTypeCount,
};

gl::Shader *GetShader(const gl::ProgramState &programState, uint32_t shaderIndex)
{
    switch (shaderIndex)
    {
        case VertexShader:
            return programState.getAttachedShader(gl::ShaderType::Vertex);
        case FragmentShader:
            return programState.getAttachedShader(gl::ShaderType::Fragment);
        default:
            UNREACHABLE();
            return nullptr;
    }
}

}  // anonymous namespace

ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
              kUniformBlockStreamingBufferMinSize),
      uniformData(),
      uniformsDirty(false),
      uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state),
      mDefaultUniformBlocks(),
      mUniformBlocksOffsets(),
      mUsedDescriptorSetRange(),
      mDirtyTextures(true)
{
    mUniformBlocksOffsets.fill(0);
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk()
{
}

gl::Error ProgramVk::destroy(const gl::Context *contextImpl)
{
    ContextVk *contextVk = vk::GetImpl(contextImpl);
    return reset(contextVk);
}

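// Releases the Vulkan objects owned by this program (shader modules, uniform block storage and
// descriptor set bookkeeping) so the program can be re-linked or destroyed.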
vk::Error ProgramVk::reset(ContextVk *contextVk)
{
    // TODO(jmadill): Handle re-linking a program that is in-use. http://anglebug.com/2397

    VkDevice device = contextVk->getDevice();

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.destroy(device);
    }

    mEmptyUniformBlockStorage.memory.destroy(device);
    mEmptyUniformBlockStorage.buffer.destroy(device);

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    mVertexModuleSerial   = Serial();
    mFragmentModuleSerial = Serial();

    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;

    return vk::NoError();
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

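// Links the program for the Vulkan back-end: translates the GLSL sources to SPIR-V through the
// GlslangWrapper, wraps the SPIR-V in vk::ShaderModule objects, and sets up the default uniform
// block storage and descriptor set ranges used at draw time.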
gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::ProgramLinkedResources &resources,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
    VkDevice device = renderer->getDevice();

    ANGLE_TRY(reset(contextVk));

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(
        glslangWrapper->linkProgram(glContext, mState, resources, &vertexCode, &fragmentCode),
        linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    {
        VkShaderModuleCreateInfo vertexShaderInfo;
        vertexShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        vertexShaderInfo.pNext    = nullptr;
        vertexShaderInfo.flags    = 0;
        vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
        vertexShaderInfo.pCode    = vertexCode.data();

        ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
        mVertexModuleSerial = renderer->issueProgramSerial();
    }

    {
        VkShaderModuleCreateInfo fragmentShaderInfo;
        fragmentShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        fragmentShaderInfo.pNext    = nullptr;
        fragmentShaderInfo.flags    = 0;
        fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
        fragmentShaderInfo.pCode    = fragmentCode.data();

        ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
        mFragmentModuleSerial = renderer->issueProgramSerial();
    }

    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(1);
        mDirtyTextures = true;
    }

    return true;
}

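// Lays out the default uniform block for each shader stage using std140 packing, records the
// per-location layout info used by the setUniform* entry points, and allocates the backing
// storage (including a one-byte "empty" buffer for stages that have no default uniforms).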
gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device = contextVk->getDevice();

    // Process vertex and fragment uniforms into std140 packing.
    std::array<sh::BlockLayoutMap, MaxShaderIndex> layoutMap;
    std::array<size_t, MaxShaderIndex> requiredBufferSize = {{0, 0}};

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        ANGLE_TRY(InitDefaultUniformBlock(glContext, GetShader(mState, shaderIndex),
                                          &layoutMap[shaderIndex],
                                          &requiredBufferSize[shaderIndex]));
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        std::array<sh::BlockMemberInfo, MaxShaderIndex> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                uniformName += ArrayString(location.arrayIndex);
            }

            bool found = false;

            for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
            {
                auto it = layoutMap[shaderIndex].find(uniformName);
                if (it != layoutMap[shaderIndex].end())
                {
                    found = true;
                    layoutInfo[shaderIndex] = it->second;
                }
            }

            ASSERT(found);
        }

        for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
        {
            mDefaultUniformBlocks[shaderIndex].uniformLayout.push_back(layoutInfo[shaderIndex]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        if (requiredBufferSize[shaderIndex] > 0)
        {
            if (!mDefaultUniformBlocks[shaderIndex].uniformData.resize(
                    requiredBufferSize[shaderIndex]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            size_t minAlignment = static_cast<size_t>(
                renderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);

            mDefaultUniformBlocks[shaderIndex].storage.init(minAlignment);

            // Initialize uniform buffer memory to zero by default.
            mDefaultUniformBlocks[shaderIndex].uniformData.fill(0);
            mDefaultUniformBlocks[shaderIndex].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext                 = nullptr;
            uniformBufferInfo.flags                 = 0;
            uniformBufferInfo.size                  = 1;
            uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices   = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            size_t requiredSize = 0;
            ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory, &requiredSize));
        }

        // Ensure the descriptor set range includes the uniform buffers at position 0.
        mUsedDescriptorSetRange.extend(0);
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    UNIMPLEMENTED();
    return GLboolean();
}

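// Shared implementation for the glUniform* entry points. Writes the new value into the CPU-side
// copy of every stage's default uniform block that uses this location and marks that block dirty;
// the data is uploaded to the GPU later in updateUniforms().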
template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        UNIMPLEMENTED();
        return;
    }

    if (linkedUniform.type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            UpdateDefaultUniformBlock(count, linkedUniform.typeInfo->componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);

            uniformBlock.uniformsDirty = true;
        }
    }
    else
    {
        ASSERT(linkedUniform.type == gl::VariableBoolVectorType(entryPointType));
        UNIMPLEMENTED();
    }
}

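// Shared implementation for the glGetUniform* entry points. Reads the value back from the
// CPU-side copy of the default uniform block of the first shader stage where the uniform is
// active.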
template <typename T>
void ProgramVk::getUniformImpl(GLint location, T *v, GLenum entryPointType) const
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        UNIMPLEMENTED();
        return;
    }

    ASSERT(linkedUniform.typeInfo->componentType == entryPointType);
    const gl::ShaderType shaderType = linkedUniform.getFirstShaderTypeWhereActive();
    ASSERT(shaderType != gl::ShaderType::InvalidEnum);

    const DefaultUniformBlock &uniformBlock =
        mDefaultUniformBlocks[static_cast<GLuint>(shaderType)];
    const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];
    ReadFromDefaultUniformBlock(linkedUniform.typeInfo->componentCount, v, layoutInfo,
                                &uniformBlock.uniformData);
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT);
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }

    setUniformImpl(location, count, value, GL_FLOAT_MAT2);
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }

    setUniformImpl(location, count, value, GL_FLOAT_MAT3);
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    if (transpose == GL_TRUE)
    {
        UNIMPLEMENTED();
        return;
    }

    setUniformImpl(location, count, value, GL_FLOAT_MAT4);
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

Serial ProgramVk::getVertexModuleSerial() const
{
    return mVertexModuleSerial;
}

const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

Serial ProgramVk::getFragmentModuleSerial() const
{
    return mFragmentModuleSerial;
}

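// Allocates (or re-allocates) the descriptor set at the given index from the context's dynamic
// descriptor pool, growing mDescriptorSets as needed. Index 0 holds the default uniform buffers
// and index 1 holds the textures.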
vk::Error ProgramVk::allocateDescriptorSet(ContextVk *contextVk, uint32_t descriptorSetIndex)
{
    RendererVk *renderer = contextVk->getRenderer();

    // Write out to a new descriptor set.
    DynamicDescriptorPool *dynamicDescriptorPool = contextVk->getDynamicDescriptorPool();
    const auto &descriptorSetLayouts = renderer->getGraphicsDescriptorSetLayouts();

    uint32_t potentialNewCount = descriptorSetIndex + 1;
    if (potentialNewCount > mDescriptorSets.size())
    {
        mDescriptorSets.resize(potentialNewCount, VK_NULL_HANDLE);
    }

    const VkDescriptorSetLayout *descriptorSetLayout =
        descriptorSetLayouts[descriptorSetIndex].ptr();

    ANGLE_TRY(dynamicDescriptorPool->allocateDescriptorSets(contextVk, descriptorSetLayout, 1,
                                                            &mDescriptorSets[descriptorSetIndex]));
    return vk::NoError();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    getUniformImpl(location, params, GL_FLOAT);
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    getUniformImpl(location, params, GL_INT);
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

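// Uploads any dirty default uniform block data into the streaming buffers. If a streaming buffer
// had to switch to a new VkBuffer, the uniform buffer descriptor set (index 0) is re-allocated
// and rewritten, since descriptor sets are not modified after they are first written.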
vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[FragmentShader].uniformsDirty)
    {
        return vk::NoError();
    }

    ASSERT(mUsedDescriptorSetRange.contains(0));

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    bool anyNewBufferAllocated = false;
    for (size_t index = 0; index < mDefaultUniformBlocks.size(); index++)
    {
        DefaultUniformBlock &uniformBlock = mDefaultUniformBlocks[index];

        if (uniformBlock.uniformsDirty)
        {
            bool bufferModified = false;
            ANGLE_TRY(SyncDefaultUniformBlock(contextVk, uniformBlock.storage,
                                              uniformBlock.uniformData,
                                              &mUniformBlocksOffsets[index], &bufferModified));
            uniformBlock.uniformsDirty = false;

            if (bufferModified)
            {
                anyNewBufferAllocated = true;
            }
        }
    }

    if (anyNewBufferAllocated)
    {
        // We need to reinitialize the descriptor sets if we newly allocated buffers since we can't
        // modify the descriptor sets once initialized.
        ANGLE_TRY(allocateDescriptorSet(contextVk, UniformBufferIndex));
        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }

    return vk::NoError();
}

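// Rewrites descriptor set 0 so that each stage's binding points at the current streaming buffer
// (or at the shared one-byte "empty" buffer when that stage has no default uniforms). The buffers
// are bound as dynamic uniform buffers; the actual offsets come from getDynamicOffsets().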
vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    std::array<VkDescriptorBufferInfo, MaxShaderIndex> descriptorBufferInfo;
    std::array<VkWriteDescriptorSet, MaxShaderIndex> writeDescriptorInfo;
    uint32_t bufferCount = 0;

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        auto &bufferInfo = descriptorBufferInfo[bufferCount];
        auto &writeInfo = writeDescriptorInfo[bufferCount];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.getCurrentBufferHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range  = VK_WHOLE_SIZE;

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = mDescriptorSets[0];
        writeInfo.dstBinding       = bufferCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        writeInfo.pImageInfo       = nullptr;
        writeInfo.pBufferInfo      = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;

        bufferCount++;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, bufferCount, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}

const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

const uint32_t *ProgramVk::getDynamicOffsets()
{
    // If no descriptor set is in use, no dynamic offsets need to be specified when binding the
    // descriptor sets.
    if (!mUsedDescriptorSetRange.contains(0))
        return nullptr;

    return mUniformBlocksOffsets.data();
}

uint32_t ProgramVk::getDynamicOffsetsCount()
{
    if (!mUsedDescriptorSetRange.contains(0))
        return 0;

    return static_cast<uint32_t>(mUniformBlocksOffsets.size());
}

const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}

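// Rewrites descriptor set 1 with the sampler and image view of every texture the program samples.
// Only runs when the bound textures have been invalidated; sampler arrays and incomplete textures
// are not handled yet (see the TODOs below).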
vk::Error ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return vk::NoError();
    }

    ANGLE_TRY(allocateDescriptorSet(contextVk, TextureIndex));

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[1];

    // TODO(jmadill): Don't hard-code the texture limit.
    ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t imageCount = 0;

    const gl::State &glState = contextVk->getGLState();
    const auto &completeTextures = glState.getCompleteTextureCache();

    for (const auto &samplerBinding : mState.getSamplerBindings())
    {
        ASSERT(!samplerBinding.unreferenced);

        // TODO(jmadill): Sampler arrays
        ASSERT(samplerBinding.boundTextureUnits.size() == 1);

        GLuint textureUnit = samplerBinding.boundTextureUnits[0];
        const gl::Texture *texture = completeTextures[textureUnit];

        // TODO(jmadill): Incomplete textures handling.
        ASSERT(texture);

        TextureVk *textureVk = vk::GetImpl(texture);
        const vk::Image &image = textureVk->getImage();

        VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];

        imageInfo.sampler     = textureVk->getSampler().getHandle();
        imageInfo.imageView   = textureVk->getImageView().getHandle();
        imageInfo.imageLayout = image.getCurrentLayout();

        auto &writeInfo = writeDescriptorInfo[imageCount];

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = descriptorSet;
        writeInfo.dstBinding       = imageCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        writeInfo.pImageInfo       = &imageInfo;
        writeInfo.pBufferInfo      = nullptr;
        writeInfo.pTexelBufferView = nullptr;

        imageCount++;
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(imageCount > 0);
    vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
    return vk::NoError();
}

void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}

void ProgramVk::setDefaultUniformBlocksMinSizeForTesting(size_t minSize)
{
    for (DefaultUniformBlock &block : mDefaultUniformBlocks)
    {
        block.storage.setMinimumSize(minSize);
    }
}
}  // namespace rx