blob: 5b2411d17b17eec574207f5a26b99aad35628604 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// ProgramVk.cpp:
7// Implements the class methods for ProgramVk.
8//
9
10#include "libANGLE/renderer/vulkan/ProgramVk.h"
11
12#include "common/debug.h"
Jamie Madill76e471e2017-10-21 09:56:01 -040013#include "common/utilities.h"
Jamie Madillc564c072017-06-01 12:45:42 -040014#include "libANGLE/Context.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050015#include "libANGLE/renderer/vulkan/ContextVk.h"
16#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
17#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill5547b382017-10-23 18:16:01 -040018#include "libANGLE/renderer/vulkan/TextureVk.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040019
20namespace rx
21{
22
Jamie Madill76e471e2017-10-21 09:56:01 -040023namespace
24{
25
// Computes the std140 layout of a shader's default uniform block and, when the
// packed block is non-empty, creates a VkBuffer plus backing memory for it.
// On success *requiredSizeOut holds the allocated size; it is set to 0 (and no
// buffer is created) when the shader has no uniforms or the block packs empty.
gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  VkDevice device,
                                  gl::Shader *shader,
                                  vk::BufferAndMemory *storageOut,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *requiredSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *requiredSizeOut = 0;
        return gl::NoError();
    }

    // Pack all default-block uniforms using std140 rules; the resulting
    // per-member offsets/strides are recorded in blockLayoutMapOut.
    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *requiredSizeOut = 0;
        return gl::NoError();
    }

    VkBufferCreateInfo uniformBufferInfo;
    uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    uniformBufferInfo.pNext                 = nullptr;
    uniformBufferInfo.flags                 = 0;
    uniformBufferInfo.size                  = blockSize;
    uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
    uniformBufferInfo.queueFamilyIndexCount = 0;
    uniformBufferInfo.pQueueFamilyIndices   = nullptr;

    ANGLE_TRY(storageOut->buffer.init(device, uniformBufferInfo));

    // Assume host visible/coherent memory available.
    VkMemoryPropertyFlags flags =
        (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

    ContextVk *contextVk = vk::GetImpl(context);

    ANGLE_TRY(AllocateBufferMemory(contextVk->getRenderer(), flags, &storageOut->buffer,
                                   &storageOut->memory, requiredSizeOut));

    return gl::NoError();
}
76
77template <typename T>
78void UpdateDefaultUniformBlock(GLsizei count,
79 int componentCount,
80 const T *v,
81 const sh::BlockMemberInfo &layoutInfo,
82 angle::MemoryBuffer *uniformData)
83{
84 // Assume an offset of -1 means the block is unused.
85 if (layoutInfo.offset == -1)
86 {
87 return;
88 }
89
90 int elementSize = sizeof(T) * componentCount;
91 if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
92 {
93 uint8_t *writePtr = uniformData->data() + layoutInfo.offset;
94 memcpy(writePtr, v, elementSize * count);
95 }
96 else
97 {
98 UNIMPLEMENTED();
99 }
100}
101
// Uploads the CPU-side uniform data into the buffer's device memory by mapping
// the full range, copying, and unmapping. If the map fails, ANGLE_TRY returns
// early and there is nothing to unmap. Requires valid memory and non-empty data.
vk::Error SyncDefaultUniformBlock(VkDevice device,
                                  vk::DeviceMemory *bufferMemory,
                                  const angle::MemoryBuffer &bufferData)
{
    ASSERT(bufferMemory->valid() && !bufferData.empty());
    uint8_t *mapPointer = nullptr;
    ANGLE_TRY(bufferMemory->map(device, 0, bufferData.size(), 0, &mapPointer));
    memcpy(mapPointer, bufferData.data(), bufferData.size());
    bufferMemory->unmap(device);
    return vk::NoError();
}
113
// Indices addressing the per-stage arrays (default uniform blocks, layout
// maps). MaxShaderIndex is the array size / loop bound, not a real stage.
enum ShaderIndex : uint32_t
{
    MinShaderIndex = 0,
    VertexShader   = MinShaderIndex,
    FragmentShader = 1,
    MaxShaderIndex = 2,
};
121
122gl::Shader *GetShader(const gl::ProgramState &programState, uint32_t shaderIndex)
123{
124 switch (shaderIndex)
125 {
126 case VertexShader:
127 return programState.getAttachedVertexShader();
128 case FragmentShader:
129 return programState.getAttachedFragmentShader();
130 default:
131 UNREACHABLE();
132 return nullptr;
133 }
134}
135
136} // anonymous namespace
137
// DefaultUniformBlock holds, per shader stage: the GPU buffer/memory, the
// CPU shadow copy of the uniform data, a dirty flag, and the per-location
// std140 layout table.
ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(), uniformData(), uniformsDirty(false), uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

// Textures start dirty and the descriptor set range starts empty; both are
// (re)established during link().
ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state), mDefaultUniformBlocks(), mUsedDescriptorSetRange(), mDirtyTextures(true)
{
    mUsedDescriptorSetRange.invalidate();
}

// Vulkan resources are released via destroy(), not here.
ProgramVk::~ProgramVk()
{
}
156
Jamie Madillb7d924a2018-03-10 11:16:54 -0500157gl::Error ProgramVk::destroy(const gl::Context *contextImpl)
Jamie Madill5deea722017-02-16 10:44:46 -0500158{
Jamie Madill67ae6c52018-03-09 11:49:01 -0500159 ContextVk *contextVk = vk::GetImpl(contextImpl);
Jamie Madillb7d924a2018-03-10 11:16:54 -0500160 return reset(contextVk);
Jamie Madillc5143482017-10-15 20:20:06 -0400161}
Jamie Madill5deea722017-02-16 10:44:46 -0500162
// Destroys all Vulkan objects owned by the program (uniform buffers and their
// memory, shader modules, descriptor sets) and returns it to the unlinked
// state. Called from destroy() and at the start of every link().
vk::Error ProgramVk::reset(ContextVk *contextVk)
{
    // TODO(jmadill): Handle re-linking a program that is in-use. http://anglebug.com/2397

    VkDevice device = contextVk->getDevice();

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.memory.destroy(device);
        uniformBlock.storage.buffer.destroy(device);
    }

    mEmptyUniformBlockStorage.memory.destroy(device);
    mEmptyUniformBlockStorage.buffer.destroy(device);

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    // Clear the serials so stale pipeline state keyed on them is not reused.
    mVertexModuleSerial   = Serial();
    mFragmentModuleSerial = Serial();

    // Free our descriptor set handles.
    if (!mDescriptorSets.empty())
    {
        vk::DescriptorPool *descriptorPool = contextVk->getDescriptorPool();
        ANGLE_TRY(descriptorPool->freeDescriptorSets(
            device, static_cast<uint32_t>(mDescriptorSets.size()), mDescriptorSets.data()));
    }
    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;

    return vk::NoError();
}
196
// Program binary load/save and link hints are not yet implemented in the
// Vulkan backend.
gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}
219
Jamie Madill9cf9e872017-06-05 12:59:25 -0400220gl::LinkResult ProgramVk::link(const gl::Context *glContext,
Jamie Madillc9727f32017-11-07 12:37:07 -0500221 const gl::ProgramLinkedResources &resources,
Jamie Madill9cf9e872017-06-05 12:59:25 -0400222 gl::InfoLog &infoLog)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400223{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400224 ContextVk *contextVk = vk::GetImpl(glContext);
Jamie Madillc5143482017-10-15 20:20:06 -0400225 RendererVk *renderer = contextVk->getRenderer();
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500226 GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
Jamie Madillc5143482017-10-15 20:20:06 -0400227 VkDevice device = renderer->getDevice();
228
Jamie Madillb7d924a2018-03-10 11:16:54 -0500229 ANGLE_TRY(reset(contextVk));
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500230
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500231 std::vector<uint32_t> vertexCode;
232 std::vector<uint32_t> fragmentCode;
233 bool linkSuccess = false;
Jamie Madill4dd167f2017-11-09 13:08:31 -0500234 ANGLE_TRY_RESULT(
235 glslangWrapper->linkProgram(glContext, mState, resources, &vertexCode, &fragmentCode),
236 linkSuccess);
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500237 if (!linkSuccess)
238 {
239 return false;
240 }
241
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500242 {
243 VkShaderModuleCreateInfo vertexShaderInfo;
244 vertexShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
245 vertexShaderInfo.pNext = nullptr;
246 vertexShaderInfo.flags = 0;
247 vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
248 vertexShaderInfo.pCode = vertexCode.data();
Jamie Madillc5143482017-10-15 20:20:06 -0400249
250 ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
Jamie Madillf2f6d372018-01-10 21:37:23 -0500251 mVertexModuleSerial = renderer->issueProgramSerial();
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500252 }
253
254 {
255 VkShaderModuleCreateInfo fragmentShaderInfo;
256 fragmentShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
257 fragmentShaderInfo.pNext = nullptr;
258 fragmentShaderInfo.flags = 0;
259 fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
260 fragmentShaderInfo.pCode = fragmentCode.data();
261
Jamie Madillc5143482017-10-15 20:20:06 -0400262 ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
Jamie Madillf2f6d372018-01-10 21:37:23 -0500263 mFragmentModuleSerial = renderer->issueProgramSerial();
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500264 }
265
Jamie Madill76e471e2017-10-21 09:56:01 -0400266 ANGLE_TRY(initDescriptorSets(contextVk));
267 ANGLE_TRY(initDefaultUniformBlocks(glContext));
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500268
Jamie Madill8c3988c2017-12-21 14:44:56 -0500269 if (!mState.getSamplerUniformRange().empty())
270 {
271 // Ensure the descriptor set range includes the textures at position 1.
272 mUsedDescriptorSetRange.extend(1);
273 mDirtyTextures = true;
274 }
275
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500276 return true;
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400277}
278
Jamie Madill76e471e2017-10-21 09:56:01 -0400279gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
280{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400281 ContextVk *contextVk = vk::GetImpl(glContext);
Jamie Madill57fbfd82018-02-14 12:45:34 -0500282 RendererVk *renderer = contextVk->getRenderer();
Jamie Madill76e471e2017-10-21 09:56:01 -0400283 VkDevice device = contextVk->getDevice();
284
285 // Process vertex and fragment uniforms into std140 packing.
286 std::array<sh::BlockLayoutMap, 2> layoutMap;
287 std::array<size_t, 2> requiredBufferSize = {{0, 0}};
288
289 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
290 {
291 ANGLE_TRY(InitDefaultUniformBlock(glContext, device, GetShader(mState, shaderIndex),
292 &mDefaultUniformBlocks[shaderIndex].storage,
293 &layoutMap[shaderIndex],
294 &requiredBufferSize[shaderIndex]));
295 }
296
297 // Init the default block layout info.
298 const auto &locations = mState.getUniformLocations();
299 const auto &uniforms = mState.getUniforms();
300 for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
301 {
302 std::array<sh::BlockMemberInfo, 2> layoutInfo;
303
304 const auto &location = locations[locationIndex];
305 if (location.used() && !location.ignored)
306 {
Jamie Madillde03e002017-10-21 14:04:20 -0400307 const auto &uniform = uniforms[location.index];
308
309 if (uniform.isSampler())
310 continue;
311
Jamie Madill76e471e2017-10-21 09:56:01 -0400312 std::string uniformName = uniform.name;
313 if (uniform.isArray())
314 {
Olli Etuaho1734e172017-10-27 15:30:27 +0300315 uniformName += ArrayString(location.arrayIndex);
Jamie Madill76e471e2017-10-21 09:56:01 -0400316 }
317
318 bool found = false;
319
320 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
321 {
322 auto it = layoutMap[shaderIndex].find(uniformName);
323 if (it != layoutMap[shaderIndex].end())
324 {
325 found = true;
326 layoutInfo[shaderIndex] = it->second;
327 }
328 }
329
330 ASSERT(found);
331 }
332
333 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
334 {
335 mDefaultUniformBlocks[shaderIndex].uniformLayout.push_back(layoutInfo[shaderIndex]);
336 }
337 }
338
339 bool anyDirty = false;
340 bool allDirty = true;
341
342 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
343 {
344 if (requiredBufferSize[shaderIndex] > 0)
345 {
346 if (!mDefaultUniformBlocks[shaderIndex].uniformData.resize(
347 requiredBufferSize[shaderIndex]))
348 {
349 return gl::OutOfMemory() << "Memory allocation failure.";
350 }
351 mDefaultUniformBlocks[shaderIndex].uniformData.fill(0);
352 mDefaultUniformBlocks[shaderIndex].uniformsDirty = true;
353
354 anyDirty = true;
355 }
356 else
357 {
358 allDirty = false;
359 }
360 }
361
362 if (anyDirty)
363 {
364 // Initialize the "empty" uniform block if necessary.
365 if (!allDirty)
366 {
367 VkBufferCreateInfo uniformBufferInfo;
368 uniformBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
369 uniformBufferInfo.pNext = nullptr;
370 uniformBufferInfo.flags = 0;
371 uniformBufferInfo.size = 1;
372 uniformBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
373 uniformBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
374 uniformBufferInfo.queueFamilyIndexCount = 0;
375 uniformBufferInfo.pQueueFamilyIndices = nullptr;
376
377 ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));
378
Jamie Madill57dd97a2018-02-06 17:10:49 -0500379 // Assume host vislble/coherent memory available.
380 VkMemoryPropertyFlags flags =
381 (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Jamie Madill76e471e2017-10-21 09:56:01 -0400382 size_t requiredSize = 0;
Jamie Madill57fbfd82018-02-14 12:45:34 -0500383 ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
Jamie Madill76e471e2017-10-21 09:56:01 -0400384 &mEmptyUniformBlockStorage.memory, &requiredSize));
385 }
386
387 ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
Jamie Madill8c3988c2017-12-21 14:44:56 -0500388
389 // Ensure the descriptor set range includes the uniform buffers at position 0.
390 mUsedDescriptorSetRange.extend(0);
Jamie Madill5547b382017-10-23 18:16:01 -0400391 }
Jamie Madill76e471e2017-10-21 09:56:01 -0400392
393 return gl::NoError();
394}
395
// Program validation is not yet implemented in the Vulkan backend.
GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    UNIMPLEMENTED();
    return GLboolean();
}
401
Jamie Madill76e471e2017-10-21 09:56:01 -0400402template <typename T>
403void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
404{
405 const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
406 const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];
407
408 if (linkedUniform.type == entryPointType)
409 {
410 for (auto &uniformBlock : mDefaultUniformBlocks)
411 {
412 const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];
413 UpdateDefaultUniformBlock(count, linkedUniform.typeInfo->componentCount, v, layoutInfo,
414 &uniformBlock.uniformData);
415 }
416 }
417 else
418 {
419 ASSERT(linkedUniform.type == gl::VariableBoolVectorType(entryPointType));
420 UNIMPLEMENTED();
421 }
422}
423
// Float vector uniform setters: thin forwards to the shared implementation,
// tagged with the GL type expected at each entry point.
void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}
443
// Integer, unsigned-integer, and matrix uniform setters, uniform block
// binding, and path-rendering fragment input generation are not yet
// implemented in the Vulkan backend.
void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}
568
// Returns the compiled vertex shader module; only valid after a successful link.
const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

// Serial issued when the vertex module was created at link time.
Serial ProgramVk::getVertexModuleSerial() const
{
    return mVertexModuleSerial;
}

// Returns the compiled fragment shader module; only valid after a successful link.
const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

// Serial issued when the fragment module was created at link time.
Serial ProgramVk::getFragmentModuleSerial() const
{
    return mFragmentModuleSerial;
}
590
// Allocates one descriptor set per graphics descriptor set layout from the
// context's pool into mDescriptorSets. Must only run when no sets are held
// (reset() frees them before re-link).
vk::Error ProgramVk::initDescriptorSets(ContextVk *contextVk)
{
    ASSERT(mDescriptorSets.empty());

    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device      = contextVk->getDevice();

    // Write out to a new a descriptor set.
    // TODO(jmadill): Handle descriptor set lifetime.
    vk::DescriptorPool *descriptorPool = contextVk->getDescriptorPool();

    const auto &descriptorSetLayouts = renderer->getGraphicsDescriptorSetLayouts();

    uint32_t descriptorSetCount = static_cast<uint32_t>(descriptorSetLayouts.size());

    VkDescriptorSetAllocateInfo allocInfo;
    allocInfo.sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    allocInfo.pNext              = nullptr;
    allocInfo.descriptorPool     = descriptorPool->getHandle();
    allocInfo.descriptorSetCount = descriptorSetCount;
    // The layouts are stored contiguously, so the first element's pointer
    // serves as the array for the allocation call.
    allocInfo.pSetLayouts        = descriptorSetLayouts[0].ptr();

    mDescriptorSets.resize(descriptorSetCount, VK_NULL_HANDLE);
    ANGLE_TRY(descriptorPool->allocateDescriptorSets(device, allocInfo, &mDescriptorSets[0]));
    return vk::NoError();
}
617
// Uniform read-back (glGetUniform*) is not yet implemented in the Vulkan backend.
void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    UNIMPLEMENTED();
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    UNIMPLEMENTED();
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}
632
Jamie Madill76e471e2017-10-21 09:56:01 -0400633vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
634{
635 if (!mDefaultUniformBlocks[VertexShader].uniformsDirty &&
636 !mDefaultUniformBlocks[FragmentShader].uniformsDirty)
637 {
638 return vk::NoError();
639 }
640
Jamie Madill8c3988c2017-12-21 14:44:56 -0500641 ASSERT(mUsedDescriptorSetRange.contains(0));
Jamie Madill5547b382017-10-23 18:16:01 -0400642
Jamie Madill76e471e2017-10-21 09:56:01 -0400643 VkDevice device = contextVk->getDevice();
644
645 // Update buffer memory by immediate mapping. This immediate update only works once.
646 // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
647 for (auto &uniformBlock : mDefaultUniformBlocks)
648 {
649 if (uniformBlock.uniformsDirty)
650 {
651 ANGLE_TRY(SyncDefaultUniformBlock(device, &uniformBlock.storage.memory,
652 uniformBlock.uniformData));
653 uniformBlock.uniformsDirty = false;
654 }
655 }
656
657 return vk::NoError();
658}
659
// Writes the two uniform-buffer bindings of descriptor set 0: each stage's
// default uniform buffer, or the shared "empty" buffer for a stage with no
// default uniforms (the pipeline layout still expects a buffer there).
vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    // Both arrays must outlive the vkUpdateDescriptorSets call below, since
    // each writeInfo holds a pointer into descriptorBufferInfo.
    std::array<VkDescriptorBufferInfo, 2> descriptorBufferInfo;
    std::array<VkWriteDescriptorSet, 2> writeDescriptorInfo;
    uint32_t bufferCount = 0;

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        auto &bufferInfo = descriptorBufferInfo[bufferCount];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.buffer.getHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range  = VK_WHOLE_SIZE;

        auto &writeInfo = writeDescriptorInfo[bufferCount];

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = mDescriptorSets[0];
        // One binding per stage: 0 = vertex, 1 = fragment (iteration order of
        // mDefaultUniformBlocks).
        writeInfo.dstBinding       = bufferCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        writeInfo.pImageInfo       = nullptr;
        writeInfo.pBufferInfo      = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;

        bufferCount++;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, bufferCount, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}
704
// Descriptor set handles allocated at link time (index 0: default uniform
// buffers, index 1: textures).
const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

// The range of descriptor set indices this program actually uses; empty
// (invalidated) until link establishes uniforms and/or samplers.
const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}
714
// Rewrites descriptor set 1 with one combined image sampler per sampler
// binding, pulling image/view/sampler handles from the currently complete
// bound textures. No-op unless there are sampler bindings and the set was
// marked dirty (by link() or invalidateTextures()).
void ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return;
    }

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[1];

    // TODO(jmadill): Don't hard-code the texture limit.
    // Both arrays must outlive the vkUpdateDescriptorSets call below, since
    // each writeInfo holds a pointer into descriptorImageInfo.
    ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t imageCount = 0;

    const gl::State &glState     = contextVk->getGLState();
    const auto &completeTextures = glState.getCompleteTextureCache();

    for (const auto &samplerBinding : mState.getSamplerBindings())
    {
        ASSERT(!samplerBinding.unreferenced);

        // TODO(jmadill): Sampler arrays
        ASSERT(samplerBinding.boundTextureUnits.size() == 1);

        GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
        const gl::Texture *texture = completeTextures[textureUnit];

        // TODO(jmadill): Incomplete textures handling.
        ASSERT(texture);

        TextureVk *textureVk   = vk::GetImpl(texture);
        const vk::Image &image = textureVk->getImage();

        VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];

        imageInfo.sampler     = textureVk->getSampler().getHandle();
        imageInfo.imageView   = textureVk->getImageView().getHandle();
        imageInfo.imageLayout = image.getCurrentLayout();

        auto &writeInfo = writeDescriptorInfo[imageCount];

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = descriptorSet;
        writeInfo.dstBinding       = imageCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        writeInfo.pImageInfo       = &imageInfo;
        writeInfo.pBufferInfo      = nullptr;
        writeInfo.pTexelBufferView = nullptr;

        imageCount++;
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(imageCount > 0);
    vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
}
778
// Marks the texture descriptor set stale; the next updateTexturesDescriptorSet
// call will rewrite it from the currently bound textures.
void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}
783
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400784} // namespace rx