//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

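// Computes the std140 layout of a shader's default uniform block and creates a host-visible
// Vulkan buffer large enough to back it. The computed layout is returned in |blockLayoutMapOut|
// and the allocation size in |requiredSizeOut|; a size of zero means the shader has no
// (non-zero-sized) default-block uniforms.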
gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  VkDevice device,
                                  gl::Shader *shader,
                                  vk::BufferAndMemory *storageOut,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *requiredSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *requiredSizeOut = 0;
        return gl::NoError();
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *requiredSizeOut = 0;
        return gl::NoError();
    }

    VkBufferCreateInfo uniformBufferInfo;
    uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    uniformBufferInfo.pNext                 = nullptr;
    uniformBufferInfo.flags                 = 0;
    uniformBufferInfo.size                  = blockSize;
    uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
    uniformBufferInfo.queueFamilyIndexCount = 0;
    uniformBufferInfo.pQueueFamilyIndices   = nullptr;

    ANGLE_TRY(storageOut->buffer.init(device, uniformBufferInfo));

    // Assume host visible/coherent memory available.
    VkMemoryPropertyFlags flags =
        (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

    ContextVk *contextVk = vk::GetImpl(context);

    ANGLE_TRY(AllocateBufferMemory(contextVk->getRenderer(), flags, &storageOut->buffer,
                                   &storageOut->memory, requiredSizeOut));

    return gl::NoError();
}

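// Copies |count| elements of |componentCount| components each from the client pointer |v| into
// the CPU-side shadow copy of the default uniform block, at the offset given by |layoutInfo|.
// Only tightly packed arrays are handled so far.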
template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    // Assume an offset of -1 means the block is unused.
    if (layoutInfo.offset == -1)
    {
        return;
    }

    int elementSize = sizeof(T) * componentCount;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint8_t *writePtr = uniformData->data() + layoutInfo.offset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        UNIMPLEMENTED();
    }
}

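// Uploads the CPU-side uniform data to the GPU buffer by mapping the memory, copying, and
// unmapping. No explicit flush is needed because the memory is allocated host-coherent.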
vk::Error SyncDefaultUniformBlock(VkDevice device,
                                  vk::DeviceMemory *bufferMemory,
                                  const angle::MemoryBuffer &bufferData)
{
    ASSERT(bufferMemory->valid() && !bufferData.empty());
    uint8_t *mapPointer = nullptr;
    ANGLE_TRY(bufferMemory->map(device, 0, bufferData.size(), 0, &mapPointer));
    memcpy(mapPointer, bufferData.data(), bufferData.size());
    bufferMemory->unmap(device);
    return vk::NoError();
}

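// Indices used to address the vertex and fragment shader entries in the per-shader arrays
// below (default uniform blocks, layout maps, buffer sizes).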
enum ShaderIndex : uint32_t
{
    MinShaderIndex = 0,
    VertexShader   = MinShaderIndex,
    FragmentShader = 1,
    MaxShaderIndex = 2,
};

gl::Shader *GetShader(const gl::ProgramState &programState, uint32_t shaderIndex)
{
    switch (shaderIndex)
    {
        case VertexShader:
            return programState.getAttachedVertexShader();
        case FragmentShader:
            return programState.getAttachedFragmentShader();
        default:
            UNREACHABLE();
            return nullptr;
    }
}

}  // anonymous namespace

ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(), uniformData(), uniformsDirty(false), uniformLayout()
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state), mDefaultUniformBlocks(), mUsedDescriptorSetRange(), mDirtyTextures(true)
{
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk()
{
}

void ProgramVk::destroy(const gl::Context *contextImpl)
{
    ContextVk *contextVk = vk::GetImpl(contextImpl);
    reset(contextVk);
}

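// Releases every Vulkan object owned by the program: the per-stage uniform buffers and their
// memory, the placeholder "empty" uniform buffer, the linked shader modules, and any allocated
// descriptor sets.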
void ProgramVk::reset(ContextVk *contextVk)
{
    // TODO(jmadill): Handle re-linking a program that is in-use. http://anglebug.com/2397

    VkDevice device = contextVk->getDevice();

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.memory.destroy(device);
        uniformBlock.storage.buffer.destroy(device);
    }

    mEmptyUniformBlockStorage.memory.destroy(device);
    mEmptyUniformBlockStorage.buffer.destroy(device);

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    mVertexModuleSerial   = Serial();
    mFragmentModuleSerial = Serial();

    // Free our descriptor set handles.
    if (!mDescriptorSets.empty())
    {
        vk::DescriptorPool *descriptorPool = contextVk->getDescriptorPool();
        vkFreeDescriptorSets(device, descriptorPool->getHandle(),
                             static_cast<uint32_t>(mDescriptorSets.size()), mDescriptorSets.data());
    }
    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();
    mDirtyTextures = false;
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

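// Linking translates the GLSL shaders to SPIR-V through GlslangWrapper, wraps the resulting
// code in VkShaderModules, allocates the program's descriptor sets, and initializes the
// default uniform block storage. When the program samples textures, the texture descriptor
// set (index 1) is marked dirty so it gets written before the first draw.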
gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::ProgramLinkedResources &resources,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk           = vk::GetImpl(glContext);
    RendererVk *renderer           = contextVk->getRenderer();
    GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
    VkDevice device                = renderer->getDevice();

    reset(contextVk);

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(
        glslangWrapper->linkProgram(glContext, mState, resources, &vertexCode, &fragmentCode),
        linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    {
        VkShaderModuleCreateInfo vertexShaderInfo;
        vertexShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        vertexShaderInfo.pNext    = nullptr;
        vertexShaderInfo.flags    = 0;
        vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
        vertexShaderInfo.pCode    = vertexCode.data();

        ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
        mVertexModuleSerial = renderer->issueProgramSerial();
    }

    {
        VkShaderModuleCreateInfo fragmentShaderInfo;
        fragmentShaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        fragmentShaderInfo.pNext    = nullptr;
        fragmentShaderInfo.flags    = 0;
        fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
        fragmentShaderInfo.pCode    = fragmentCode.data();

        ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
        mFragmentModuleSerial = renderer->issueProgramSerial();
    }

    ANGLE_TRY(initDescriptorSets(contextVk));
    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(1);
        mDirtyTextures = true;
    }

    return true;
}

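// Builds the std140 layout of each shader's default uniform block, records per-location layout
// info so uniform updates can be routed to the right offsets in each stage, and allocates the
// backing buffers. When only one stage has uniforms, a one-byte "empty" buffer is created so
// both uniform-buffer bindings in descriptor set 0 remain valid.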
gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device      = contextVk->getDevice();

    // Process vertex and fragment uniforms into std140 packing.
    std::array<sh::BlockLayoutMap, 2> layoutMap;
    std::array<size_t, 2> requiredBufferSize = {{0, 0}};

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        ANGLE_TRY(InitDefaultUniformBlock(glContext, device, GetShader(mState, shaderIndex),
                                          &mDefaultUniformBlocks[shaderIndex].storage,
                                          &layoutMap[shaderIndex],
                                          &requiredBufferSize[shaderIndex]));
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms  = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        std::array<sh::BlockMemberInfo, 2> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                uniformName += ArrayString(location.arrayIndex);
            }

            bool found = false;

            for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
            {
                auto it = layoutMap[shaderIndex].find(uniformName);
                if (it != layoutMap[shaderIndex].end())
                {
                    found                   = true;
                    layoutInfo[shaderIndex] = it->second;
                }
            }

            ASSERT(found);
        }

        for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
        {
            mDefaultUniformBlocks[shaderIndex].uniformLayout.push_back(layoutInfo[shaderIndex]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        if (requiredBufferSize[shaderIndex] > 0)
        {
            if (!mDefaultUniformBlocks[shaderIndex].uniformData.resize(
                    requiredBufferSize[shaderIndex]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            mDefaultUniformBlocks[shaderIndex].uniformData.fill(0);
            mDefaultUniformBlocks[shaderIndex].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext                 = nullptr;
            uniformBufferInfo.flags                 = 0;
            uniformBufferInfo.size                  = 1;
            uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices   = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            size_t requiredSize = 0;
            ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory, &requiredSize));
        }

        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));

        // Ensure the descriptor set range includes the uniform buffers at position 0.
        mUsedDescriptorSetRange.extend(0);
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    UNIMPLEMENTED();
    return GLboolean();
}

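// Common implementation behind the glUniform* entry points: writes the new values into the
// CPU-side shadow uniform data of both shader stages, using the per-location layout recorded at
// link time. Boolean uniforms, which arrive with a different entry-point type, are not handled
// yet.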
template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];
            UpdateDefaultUniformBlock(count, linkedUniform.typeInfo->componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);
        }
    }
    else
    {
        ASSERT(linkedUniform.type == gl::VariableBoolVectorType(entryPointType));
        UNIMPLEMENTED();
    }
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

Serial ProgramVk::getVertexModuleSerial() const
{
    return mVertexModuleSerial;
}

const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

Serial ProgramVk::getFragmentModuleSerial() const
{
    return mFragmentModuleSerial;
}

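// Allocates one descriptor set per graphics descriptor set layout from the context's descriptor
// pool: the default uniform buffers live in set 0 and the textures in set 1.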
vk::Error ProgramVk::initDescriptorSets(ContextVk *contextVk)
{
    ASSERT(mDescriptorSets.empty());

    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device      = contextVk->getDevice();

    // Write out to a new descriptor set.
    // TODO(jmadill): Handle descriptor set lifetime.
    vk::DescriptorPool *descriptorPool = contextVk->getDescriptorPool();

    const auto &descriptorSetLayouts = renderer->getGraphicsDescriptorSetLayouts();

    uint32_t descriptorSetCount = static_cast<uint32_t>(descriptorSetLayouts.size());

    VkDescriptorSetAllocateInfo allocInfo;
    allocInfo.sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    allocInfo.pNext              = nullptr;
    allocInfo.descriptorPool     = descriptorPool->getHandle();
    allocInfo.descriptorSetCount = descriptorSetCount;
    allocInfo.pSetLayouts        = descriptorSetLayouts[0].ptr();

    mDescriptorSets.resize(descriptorSetCount, VK_NULL_HANDLE);
    ANGLE_TRY(descriptorPool->allocateDescriptorSets(device, allocInfo, &mDescriptorSets[0]));
    return vk::NoError();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    UNIMPLEMENTED();
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    UNIMPLEMENTED();
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

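// Flushes any dirty default uniform data to the GPU-side buffers, and early-outs when neither
// stage has pending changes.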
vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[FragmentShader].uniformsDirty)
    {
        return vk::NoError();
    }

    ASSERT(mUsedDescriptorSetRange.contains(0));

    VkDevice device = contextVk->getDevice();

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        if (uniformBlock.uniformsDirty)
        {
            ANGLE_TRY(SyncDefaultUniformBlock(device, &uniformBlock.storage.memory,
                                              uniformBlock.uniformData));
            uniformBlock.uniformsDirty = false;
        }
    }

    return vk::NoError();
}

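// Points the two uniform-buffer bindings of descriptor set 0 at each stage's default uniform
// buffer, substituting the shared empty buffer for a stage that has no default uniforms.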
vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    std::array<VkDescriptorBufferInfo, 2> descriptorBufferInfo;
    std::array<VkWriteDescriptorSet, 2> writeDescriptorInfo;
    uint32_t bufferCount = 0;

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        auto &bufferInfo = descriptorBufferInfo[bufferCount];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.buffer.getHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range  = VK_WHOLE_SIZE;

        auto &writeInfo = writeDescriptorInfo[bufferCount];

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = mDescriptorSets[0];
        writeInfo.dstBinding       = bufferCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        writeInfo.pImageInfo       = nullptr;
        writeInfo.pBufferInfo      = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;

        bufferCount++;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, bufferCount, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}

const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
{
    return mDescriptorSets;
}

const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
{
    return mUsedDescriptorSetRange;
}

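// Writes a combined image/sampler descriptor for every bound texture into descriptor set 1,
// using the context's complete-texture cache. Skipped when the program samples no textures or
// the bindings have not changed since the last update.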
void ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
{
    if (mState.getSamplerBindings().empty() || !mDirtyTextures)
    {
        return;
    }

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[1];

    // TODO(jmadill): Don't hard-code the texture limit.
    ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t imageCount = 0;

    const gl::State &glState     = contextVk->getGLState();
    const auto &completeTextures = glState.getCompleteTextureCache();

    for (const auto &samplerBinding : mState.getSamplerBindings())
    {
        ASSERT(!samplerBinding.unreferenced);

        // TODO(jmadill): Sampler arrays
        ASSERT(samplerBinding.boundTextureUnits.size() == 1);

        GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
        const gl::Texture *texture = completeTextures[textureUnit];

        // TODO(jmadill): Incomplete textures handling.
        ASSERT(texture);

        TextureVk *textureVk   = vk::GetImpl(texture);
        const vk::Image &image = textureVk->getImage();

        VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];

        imageInfo.sampler     = textureVk->getSampler().getHandle();
        imageInfo.imageView   = textureVk->getImageView().getHandle();
        imageInfo.imageLayout = image.getCurrentLayout();

        auto &writeInfo = writeDescriptorInfo[imageCount];

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = descriptorSet;
        writeInfo.dstBinding       = imageCount;
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        writeInfo.pImageInfo       = &imageInfo;
        writeInfo.pBufferInfo      = nullptr;
        writeInfo.pTexelBufferView = nullptr;

        imageCount++;
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(imageCount > 0);
    vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);

    mDirtyTextures = false;
}

void ProgramVk::invalidateTextures()
{
    mDirtyTextures = true;
}

}  // namespace rx