//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

gl::Error InitDefaultUniformBlock(const gl::Context *context,
                                  VkDevice device,
                                  gl::Shader *shader,
                                  vk::BufferAndMemory *storageOut,
                                  sh::BlockLayoutMap *blockLayoutMapOut,
                                  size_t *requiredSizeOut)
{
    const auto &uniforms = shader->getUniforms(context);

    if (uniforms.empty())
    {
        *requiredSizeOut = 0;
        return gl::NoError();
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, false, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *requiredSizeOut = 0;
        return gl::NoError();
    }

    VkBufferCreateInfo uniformBufferInfo;
    uniformBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    uniformBufferInfo.pNext = nullptr;
    uniformBufferInfo.flags = 0;
    uniformBufferInfo.size = blockSize;
    uniformBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    uniformBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    uniformBufferInfo.queueFamilyIndexCount = 0;
    uniformBufferInfo.pQueueFamilyIndices = nullptr;

    ANGLE_TRY(storageOut->buffer.init(device, uniformBufferInfo));

    ANGLE_TRY(AllocateBufferMemory(GetImplAs<ContextVk>(context), blockSize, &storageOut->buffer,
                                   &storageOut->memory, requiredSizeOut));

    return gl::NoError();
}
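
// Illustrative example (editor's sketch, assuming typical std140 rules): a default block
// declared as
//
//     uniform vec3 color;
//     uniform float scale;
//
// would be encoded with color at offset 0 (base alignment 16, size 12) and scale at
// offset 12 (alignment 4, size 4), for a 16-byte block. The BlockLayoutMap filled in above
// records one such offset per uniform name, and the setUniform* paths below use those
// offsets to memcpy new values into the per-stage shadow buffer.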

template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    // An offset of -1 means this uniform is not present in this shader stage's default block.
    if (layoutInfo.offset == -1)
    {
        return;
    }

    int elementSize = sizeof(T) * componentCount;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint8_t *writePtr = uniformData->data() + layoutInfo.offset;
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        UNIMPLEMENTED();
    }
}

vk::Error SyncDefaultUniformBlock(VkDevice device,
                                  vk::DeviceMemory *bufferMemory,
                                  const angle::MemoryBuffer &bufferData)
{
    ASSERT(bufferMemory->valid() && !bufferData.empty());
    uint8_t *mapPointer = nullptr;
    ANGLE_TRY(bufferMemory->map(device, 0, bufferData.size(), 0, &mapPointer));
    memcpy(mapPointer, bufferData.data(), bufferData.size());
    bufferMemory->unmap(device);
    return vk::NoError();
}
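
// Note (editor's assumption): SyncDefaultUniformBlock relies on the buffer memory having
// been allocated from a host-visible (mappable) memory type, which is what
// AllocateBufferMemory is expected to provide here; the whole shadow buffer is uploaded in
// a single map/memcpy/unmap.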

enum ShaderIndex : uint32_t
{
    MinShaderIndex = 0,
    VertexShader = MinShaderIndex,
    FragmentShader = 1,
    MaxShaderIndex = 2,
};
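
// These indices select the per-stage slots in the two-element arrays used throughout this
// file (mDefaultUniformBlocks, layoutMap, requiredBufferSize, and the descriptor writes):
// slot 0 holds vertex-stage data and slot 1 holds fragment-stage data.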

gl::Shader *GetShader(const gl::ProgramState &programState, uint32_t shaderIndex)
{
    switch (shaderIndex)
    {
        case VertexShader:
            return programState.getAttachedVertexShader();
        case FragmentShader:
            return programState.getAttachedFragmentShader();
        default:
            UNREACHABLE();
            return nullptr;
    }
}

}  // anonymous namespace

ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(), uniformData(), uniformsDirty(false), uniformLayout()
{
}

ProgramVk::ProgramVk(const gl::ProgramState &state)
    : ProgramImpl(state), mDefaultUniformBlocks(), mDescriptorSetOffset(0), mDirtyTextures(true)
{
}

ProgramVk::~ProgramVk()
{
}

void ProgramVk::destroy(const gl::Context *contextImpl)
{
    VkDevice device = GetImplAs<ContextVk>(contextImpl)->getDevice();
    reset(device);
}

void ProgramVk::reset(VkDevice device)
{
    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.memory.destroy(device);
        uniformBlock.storage.buffer.destroy(device);
    }

    mEmptyUniformBlockStorage.memory.destroy(device);
    mEmptyUniformBlockStorage.buffer.destroy(device);

    for (auto &descriptorSetLayout : mDescriptorSetLayouts)
    {
        descriptorSetLayout.destroy(device);
    }

    mLinkedFragmentModule.destroy(device);
    mLinkedVertexModule.destroy(device);
    mPipelineLayout.destroy(device);

    // Descriptor sets are pool-allocated, so they do not need to be freed explicitly.
    mDescriptorSets.clear();
    mDescriptorSetOffset = 0;
    mDirtyTextures = false;
}

gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
                               gl::InfoLog &infoLog,
                               gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

gl::LinkResult ProgramVk::link(const gl::Context *glContext,
                               const gl::VaryingPacking &packing,
                               gl::InfoLog &infoLog)
{
    ContextVk *contextVk = GetImplAs<ContextVk>(glContext);
    RendererVk *renderer = contextVk->getRenderer();
    GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
    VkDevice device = renderer->getDevice();

    reset(device);

    std::vector<uint32_t> vertexCode;
    std::vector<uint32_t> fragmentCode;
    bool linkSuccess = false;
    ANGLE_TRY_RESULT(glslangWrapper->linkProgram(glContext, mState, &vertexCode, &fragmentCode),
                     linkSuccess);
    if (!linkSuccess)
    {
        return false;
    }

    {
        VkShaderModuleCreateInfo vertexShaderInfo;
        vertexShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        vertexShaderInfo.pNext = nullptr;
        vertexShaderInfo.flags = 0;
        vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
        vertexShaderInfo.pCode = vertexCode.data();

        ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
    }

    {
        VkShaderModuleCreateInfo fragmentShaderInfo;
        fragmentShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        fragmentShaderInfo.pNext = nullptr;
        fragmentShaderInfo.flags = 0;
        fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
        fragmentShaderInfo.pCode = fragmentCode.data();

        ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
    }

    ANGLE_TRY(initPipelineLayout(contextVk));
    ANGLE_TRY(initDescriptorSets(contextVk));
    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    return true;
}

gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = GetImplAs<ContextVk>(glContext);
    VkDevice device = contextVk->getDevice();

    // Process vertex and fragment uniforms into std140 packing.
    std::array<sh::BlockLayoutMap, 2> layoutMap;
    std::array<size_t, 2> requiredBufferSize = {{0, 0}};

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        ANGLE_TRY(InitDefaultUniformBlock(glContext, device, GetShader(mState, shaderIndex),
                                          &mDefaultUniformBlocks[shaderIndex].storage,
                                          &layoutMap[shaderIndex],
                                          &requiredBufferSize[shaderIndex]));
    }

    // Init the default block layout info.
    const auto &locations = mState.getUniformLocations();
    const auto &uniforms = mState.getUniforms();
    for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
    {
        std::array<sh::BlockMemberInfo, 2> layoutInfo;

        const auto &location = locations[locationIndex];
        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];

            if (uniform.isSampler())
                continue;

            std::string uniformName = uniform.name;
            if (uniform.isArray())
            {
                uniformName += ArrayIndexString(location.arrayIndices);
            }

            bool found = false;

            for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
            {
                auto it = layoutMap[shaderIndex].find(uniformName);
                if (it != layoutMap[shaderIndex].end())
                {
                    found = true;
                    layoutInfo[shaderIndex] = it->second;
                }
            }

            ASSERT(found);
        }

        for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
        {
            mDefaultUniformBlocks[shaderIndex].uniformLayout.push_back(layoutInfo[shaderIndex]);
        }
    }

    bool anyDirty = false;
    bool allDirty = true;

    for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
    {
        if (requiredBufferSize[shaderIndex] > 0)
        {
            if (!mDefaultUniformBlocks[shaderIndex].uniformData.resize(
                    requiredBufferSize[shaderIndex]))
            {
                return gl::OutOfMemory() << "Memory allocation failure.";
            }
            mDefaultUniformBlocks[shaderIndex].uniformData.fill(0);
            mDefaultUniformBlocks[shaderIndex].uniformsDirty = true;

            anyDirty = true;
        }
        else
        {
            allDirty = false;
        }
    }

    if (anyDirty)
    {
        // Initialize the "empty" uniform block if necessary.
        if (!allDirty)
        {
            VkBufferCreateInfo uniformBufferInfo;
            uniformBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.pNext = nullptr;
            uniformBufferInfo.flags = 0;
            uniformBufferInfo.size = 1;
            uniformBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));

            size_t requiredSize = 0;
            ANGLE_TRY(AllocateBufferMemory(contextVk, 1, &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory, &requiredSize));
        }

        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }
    else
    {
        // If the program has no uniforms, note this in the offset.
        mDescriptorSetOffset = 1;
    }

    return gl::NoError();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    UNIMPLEMENTED();
    return GLboolean();
}

template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.type == entryPointType)
    {
        for (auto &uniformBlock : mDefaultUniformBlocks)
        {
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];
            UpdateDefaultUniformBlock(count, linkedUniform.typeInfo->componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);
        }
    }
    else
    {
        ASSERT(linkedUniform.type == gl::VariableBoolVectorType(entryPointType));
        UNIMPLEMENTED();
    }
}
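
// Illustrative call flow (example values, editor's sketch): a client call such as
//
//     glUniform4fv(location, 1, data);
//
// lands in setUniform4fv below, which forwards to setUniformImpl<GLfloat> with
// GL_FLOAT_VEC4. For each stage, the uniform's recorded BlockMemberInfo offset selects the
// destination in that stage's shadow buffer, and UpdateDefaultUniformBlock copies the
// 4 * sizeof(GLfloat) bytes there; updateUniforms() later maps the Vulkan buffer memory and
// uploads the whole shadow buffer.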

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
{
    UNIMPLEMENTED();
}

bool ProgramVk::getUniformBlockSize(const std::string &blockName,
                                    const std::string &blockMappedName,
                                    size_t *sizeOut) const
{
    UNIMPLEMENTED();
    return bool();
}

bool ProgramVk::getUniformBlockMemberInfo(const std::string &memberUniformName,
                                          const std::string &memberUniformMappedName,
                                          sh::BlockMemberInfo *memberInfoOut) const
{
    UNIMPLEMENTED();
    return bool();
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
{
    ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedVertexModule;
}

const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
{
    ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
    return mLinkedFragmentModule;
}

const vk::PipelineLayout &ProgramVk::getPipelineLayout() const
{
    return mPipelineLayout;
}

vk::Error ProgramVk::initPipelineLayout(ContextVk *context)
{
    ASSERT(!mPipelineLayout.valid());

    VkDevice device = context->getDevice();

    // Create up to two descriptor set layouts: set 0 holds the vertex and fragment default
    // uniform buffers, and set 1 holds the textures. The texture layout is only created when
    // the program has sampler bindings.
    VkDescriptorSetLayoutBinding uniformBindings[2];
    uint32_t blockCount = 0;

    {
        auto &layoutBinding = uniformBindings[blockCount];

        layoutBinding.binding = blockCount;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
        layoutBinding.pImmutableSamplers = nullptr;

        blockCount++;
    }

    {
        auto &layoutBinding = uniformBindings[blockCount];

        layoutBinding.binding = blockCount;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
        layoutBinding.pImmutableSamplers = nullptr;

        blockCount++;
    }

    {
        VkDescriptorSetLayoutCreateInfo uniformInfo;
        uniformInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformInfo.pNext = nullptr;
        uniformInfo.flags = 0;
        uniformInfo.bindingCount = blockCount;
        uniformInfo.pBindings = uniformBindings;

        vk::DescriptorSetLayout uniformLayout;
        ANGLE_TRY(uniformLayout.init(device, uniformInfo));
        mDescriptorSetLayouts.push_back(std::move(uniformLayout));
    }

    const auto &samplerBindings = mState.getSamplerBindings();

    if (!samplerBindings.empty())
    {
        std::vector<VkDescriptorSetLayoutBinding> textureBindings;
        uint32_t textureCount = 0;
        const auto &uniforms = mState.getUniforms();
        for (unsigned int uniformIndex : mState.getSamplerUniformRange())
        {
            const gl::LinkedUniform &samplerUniform = uniforms[uniformIndex];
            unsigned int samplerIndex = mState.getSamplerIndexFromUniformIndex(uniformIndex);
            const gl::SamplerBinding &samplerBinding = samplerBindings[samplerIndex];

            ASSERT(!samplerBinding.unreferenced);

            VkDescriptorSetLayoutBinding layoutBinding;

            uint32_t elementCount = samplerUniform.elementCount();

            layoutBinding.binding = textureCount;
            layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            layoutBinding.descriptorCount = elementCount;

            layoutBinding.stageFlags = 0;
            if (samplerUniform.vertexStaticUse)
            {
                layoutBinding.stageFlags |= VK_SHADER_STAGE_VERTEX_BIT;
            }
            if (samplerUniform.fragmentStaticUse)
            {
                layoutBinding.stageFlags |= VK_SHADER_STAGE_FRAGMENT_BIT;
            }

            layoutBinding.pImmutableSamplers = nullptr;

            textureCount += elementCount;

            textureBindings.push_back(layoutBinding);
        }

        VkDescriptorSetLayoutCreateInfo textureInfo;
        textureInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        textureInfo.pNext = nullptr;
        textureInfo.flags = 0;
        textureInfo.bindingCount = static_cast<uint32_t>(textureBindings.size());
        textureInfo.pBindings = textureBindings.data();

        vk::DescriptorSetLayout textureLayout;
        ANGLE_TRY(textureLayout.init(device, textureInfo));
        mDescriptorSetLayouts.push_back(std::move(textureLayout));

        mDirtyTextures = true;
    }

    VkPipelineLayoutCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.setLayoutCount = static_cast<uint32_t>(mDescriptorSetLayouts.size());
    createInfo.pSetLayouts = mDescriptorSetLayouts[0].ptr();
    createInfo.pushConstantRangeCount = 0;
    createInfo.pPushConstantRanges = nullptr;

    ANGLE_TRY(mPipelineLayout.init(device, createInfo));

    return vk::NoError();
}
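
// For a program with, say, one non-sampler default uniform and one sampler2D (illustrative
// example), the resulting pipeline layout is:
//
//   set 0, binding 0: uniform buffer with the vertex-stage default uniforms
//   set 0, binding 1: uniform buffer with the fragment-stage default uniforms
//   set 1, binding 0: combined image sampler for the sampler2D
//
// mDescriptorSetLayouts[0] always describes set 0; set 1 is appended only when the program
// uses samplers.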

vk::Error ProgramVk::initDescriptorSets(ContextVk *contextVk)
{
    ASSERT(mDescriptorSets.empty());

    VkDevice device = contextVk->getDevice();

    // Write out to a new descriptor set.
    // TODO(jmadill): Handle descriptor set lifetime.
    vk::DescriptorPool *descriptorPool = contextVk->getDescriptorPool();

    uint32_t descriptorSetCount = static_cast<uint32_t>(mDescriptorSetLayouts.size());

    VkDescriptorSetAllocateInfo allocInfo;
    allocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    allocInfo.pNext = nullptr;
    allocInfo.descriptorPool = descriptorPool->getHandle();
    allocInfo.descriptorSetCount = descriptorSetCount;
    allocInfo.pSetLayouts = mDescriptorSetLayouts[0].ptr();

    mDescriptorSets.resize(descriptorSetCount, VK_NULL_HANDLE);
    ANGLE_TRY(descriptorPool->allocateDescriptorSets(device, allocInfo, &mDescriptorSets[0]));
    return vk::NoError();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    UNIMPLEMENTED();
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    UNIMPLEMENTED();
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
{
    if (!mDefaultUniformBlocks[VertexShader].uniformsDirty &&
        !mDefaultUniformBlocks[FragmentShader].uniformsDirty)
    {
        return vk::NoError();
    }

    ASSERT(mDescriptorSetOffset == 0);

    VkDevice device = contextVk->getDevice();

    // Update buffer memory by immediate mapping. This immediate update only works once.
    // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        if (uniformBlock.uniformsDirty)
        {
            ANGLE_TRY(SyncDefaultUniformBlock(device, &uniformBlock.storage.memory,
                                              uniformBlock.uniformData));
            uniformBlock.uniformsDirty = false;
        }
    }

    return vk::NoError();
}

vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    std::array<VkDescriptorBufferInfo, 2> descriptorBufferInfo;
    std::array<VkWriteDescriptorSet, 2> writeDescriptorInfo;
    uint32_t bufferCount = 0;

    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        auto &bufferInfo = descriptorBufferInfo[bufferCount];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.buffer.getHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range = VK_WHOLE_SIZE;

        auto &writeInfo = writeDescriptorInfo[bufferCount];

        writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext = nullptr;
        writeInfo.dstSet = mDescriptorSets[0];
        writeInfo.dstBinding = bufferCount;
        writeInfo.dstArrayElement = 0;
        writeInfo.descriptorCount = 1;
        writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        writeInfo.pImageInfo = nullptr;
        writeInfo.pBufferInfo = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;

        bufferCount++;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, bufferCount, writeDescriptorInfo.data(), 0, nullptr);

    return vk::NoError();
}
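
// Note: when one stage has no default uniforms, its binding in set 0 still has to reference
// a valid buffer, so the write above falls back to mEmptyUniformBlockStorage; this keeps the
// descriptor set consistent with the two-binding layout created in initPipelineLayout().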
798
Jamie Madill5547b382017-10-23 18:16:01 -0400799const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
Jamie Madill76e471e2017-10-21 09:56:01 -0400800{
Jamie Madill5547b382017-10-23 18:16:01 -0400801 return mDescriptorSets;
802}
803
804uint32_t ProgramVk::getDescriptorSetOffset() const
805{
806 return mDescriptorSetOffset;
807}
808
809void ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
810{
811 if (mState.getSamplerBindings().empty() || !mDirtyTextures)
812 {
813 return;
814 }
815
816 VkDescriptorSet descriptorSet = mDescriptorSets.back();
817
818 // TODO(jmadill): Don't hard-code the texture limit.
819 ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
820 ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
821 uint32_t imageCount = 0;
822
823 const gl::State &glState = contextVk->getGLState();
824 const auto &completeTextures = glState.getCompleteTextureCache();
825
826 for (const auto &samplerBinding : mState.getSamplerBindings())
827 {
828 ASSERT(!samplerBinding.unreferenced);
829
830 // TODO(jmadill): Sampler arrays
831 ASSERT(samplerBinding.boundTextureUnits.size() == 1);
832
833 GLuint textureUnit = samplerBinding.boundTextureUnits[0];
834 const gl::Texture *texture = completeTextures[textureUnit];
835
836 // TODO(jmadill): Incomplete textures handling.
837 ASSERT(texture);
838
839 TextureVk *textureVk = GetImplAs<TextureVk>(texture);
840 const vk::Image &image = textureVk->getImage();
841
842 VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];
843
844 imageInfo.sampler = textureVk->getSampler().getHandle();
845 imageInfo.imageView = textureVk->getImageView().getHandle();
846 imageInfo.imageLayout = image.getCurrentLayout();
847
848 auto &writeInfo = writeDescriptorInfo[imageCount];
849
850 writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
851 writeInfo.pNext = nullptr;
852 writeInfo.dstSet = descriptorSet;
853 writeInfo.dstBinding = imageCount;
854 writeInfo.dstArrayElement = 0;
855 writeInfo.descriptorCount = 1;
856 writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
857 writeInfo.pImageInfo = &imageInfo;
858 writeInfo.pBufferInfo = nullptr;
859 writeInfo.pTexelBufferView = nullptr;
860
861 imageCount++;
862 }
863
864 VkDevice device = contextVk->getDevice();
865
866 ASSERT(imageCount > 0);
867 vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);
868
869 mDirtyTextures = false;
870}
871
872void ProgramVk::invalidateTextures()
873{
874 mDirtyTextures = true;
Jamie Madill76e471e2017-10-21 09:56:01 -0400875}
876
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400877} // namespace rx