blob: ced45983fb414978232e9d5e49b50a7050e14c2f [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// ProgramVk.cpp:
7// Implements the class methods for ProgramVk.
8//
9
10#include "libANGLE/renderer/vulkan/ProgramVk.h"
11
12#include "common/debug.h"
Jamie Madill76e471e2017-10-21 09:56:01 -040013#include "common/utilities.h"
Jamie Madillc564c072017-06-01 12:45:42 -040014#include "libANGLE/Context.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050015#include "libANGLE/renderer/vulkan/ContextVk.h"
16#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
17#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill5547b382017-10-23 18:16:01 -040018#include "libANGLE/renderer/vulkan/TextureVk.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040019
20namespace rx
21{
22
Jamie Madill76e471e2017-10-21 09:56:01 -040023namespace
24{
25
26gl::Error InitDefaultUniformBlock(const gl::Context *context,
27 VkDevice device,
28 gl::Shader *shader,
29 vk::BufferAndMemory *storageOut,
30 sh::BlockLayoutMap *blockLayoutMapOut,
31 size_t *requiredSizeOut)
32{
33 const auto &uniforms = shader->getUniforms(context);
34
35 if (uniforms.empty())
36 {
37 *requiredSizeOut = 0;
38 return gl::NoError();
39 }
40
41 sh::Std140BlockEncoder blockEncoder;
Olli Etuaho3de27032017-11-30 12:16:47 +020042 sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);
Jamie Madill76e471e2017-10-21 09:56:01 -040043
44 size_t blockSize = blockEncoder.getBlockSize();
45
46 // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
47 if (blockSize == 0)
48 {
49 *requiredSizeOut = 0;
50 return gl::NoError();
51 }
52
53 VkBufferCreateInfo uniformBufferInfo;
54 uniformBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
55 uniformBufferInfo.pNext = nullptr;
56 uniformBufferInfo.flags = 0;
57 uniformBufferInfo.size = blockSize;
58 uniformBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
59 uniformBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
60 uniformBufferInfo.queueFamilyIndexCount = 0;
61 uniformBufferInfo.pQueueFamilyIndices = nullptr;
62
63 ANGLE_TRY(storageOut->buffer.init(device, uniformBufferInfo));
64
Jamie Madill57dd97a2018-02-06 17:10:49 -050065 // Assume host vislble/coherent memory available.
66 VkMemoryPropertyFlags flags =
67 (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Jamie Madill57fbfd82018-02-14 12:45:34 -050068
69 ContextVk *contextVk = vk::GetImpl(context);
70
71 ANGLE_TRY(AllocateBufferMemory(contextVk->getRenderer(), flags, &storageOut->buffer,
Jamie Madill76e471e2017-10-21 09:56:01 -040072 &storageOut->memory, requiredSizeOut));
73
74 return gl::NoError();
75}
76
77template <typename T>
78void UpdateDefaultUniformBlock(GLsizei count,
79 int componentCount,
80 const T *v,
81 const sh::BlockMemberInfo &layoutInfo,
82 angle::MemoryBuffer *uniformData)
83{
84 // Assume an offset of -1 means the block is unused.
85 if (layoutInfo.offset == -1)
86 {
87 return;
88 }
89
90 int elementSize = sizeof(T) * componentCount;
91 if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
92 {
93 uint8_t *writePtr = uniformData->data() + layoutInfo.offset;
94 memcpy(writePtr, v, elementSize * count);
95 }
96 else
97 {
98 UNIMPLEMENTED();
99 }
100}
101
102vk::Error SyncDefaultUniformBlock(VkDevice device,
103 vk::DeviceMemory *bufferMemory,
104 const angle::MemoryBuffer &bufferData)
105{
106 ASSERT(bufferMemory->valid() && !bufferData.empty());
107 uint8_t *mapPointer = nullptr;
108 ANGLE_TRY(bufferMemory->map(device, 0, bufferData.size(), 0, &mapPointer));
109 memcpy(mapPointer, bufferData.data(), bufferData.size());
110 bufferMemory->unmap(device);
111 return vk::NoError();
112}
113
// Indices addressing the per-shader-stage slots in mDefaultUniformBlocks and
// the layout/size arrays below.  [MinShaderIndex, MaxShaderIndex) is the
// iteration range over all supported stages.
enum ShaderIndex : uint32_t
{
    MinShaderIndex = 0,
    VertexShader   = MinShaderIndex,  // slot 0: vertex stage
    FragmentShader = 1,               // slot 1: fragment stage
    MaxShaderIndex = 2,               // one-past-the-end sentinel
};
121
122gl::Shader *GetShader(const gl::ProgramState &programState, uint32_t shaderIndex)
123{
124 switch (shaderIndex)
125 {
126 case VertexShader:
127 return programState.getAttachedVertexShader();
128 case FragmentShader:
129 return programState.getAttachedFragmentShader();
130 default:
131 UNREACHABLE();
132 return nullptr;
133 }
134}
135
136} // anonymous namespace
137
138ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
139 : storage(), uniformData(), uniformsDirty(false), uniformLayout()
140{
141}
142
Jamie Madillacf2f3a2017-11-21 19:22:44 -0500143ProgramVk::DefaultUniformBlock::~DefaultUniformBlock()
144{
145}
146
Jamie Madill76e471e2017-10-21 09:56:01 -0400147ProgramVk::ProgramVk(const gl::ProgramState &state)
Jamie Madill8c3988c2017-12-21 14:44:56 -0500148 : ProgramImpl(state), mDefaultUniformBlocks(), mUsedDescriptorSetRange(), mDirtyTextures(true)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400149{
Jamie Madill8c3988c2017-12-21 14:44:56 -0500150 mUsedDescriptorSetRange.invalidate();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400151}
152
153ProgramVk::~ProgramVk()
154{
155}
156
Jamie Madillc564c072017-06-01 12:45:42 -0400157void ProgramVk::destroy(const gl::Context *contextImpl)
Jamie Madill5deea722017-02-16 10:44:46 -0500158{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400159 VkDevice device = vk::GetImpl(contextImpl)->getDevice();
Jamie Madillc5143482017-10-15 20:20:06 -0400160 reset(device);
161}
Jamie Madill5deea722017-02-16 10:44:46 -0500162
Jamie Madillc5143482017-10-15 20:20:06 -0400163void ProgramVk::reset(VkDevice device)
164{
Jamie Madill76e471e2017-10-21 09:56:01 -0400165 for (auto &uniformBlock : mDefaultUniformBlocks)
166 {
167 uniformBlock.storage.memory.destroy(device);
168 uniformBlock.storage.buffer.destroy(device);
169 }
170
171 mEmptyUniformBlockStorage.memory.destroy(device);
172 mEmptyUniformBlockStorage.buffer.destroy(device);
173
Jamie Madill5deea722017-02-16 10:44:46 -0500174 mLinkedFragmentModule.destroy(device);
175 mLinkedVertexModule.destroy(device);
Jamie Madillf2f6d372018-01-10 21:37:23 -0500176 mVertexModuleSerial = Serial();
177 mFragmentModuleSerial = Serial();
Jamie Madill76e471e2017-10-21 09:56:01 -0400178
179 // Descriptor Sets are pool allocated, so do not need to be explicitly freed.
Jamie Madill5547b382017-10-23 18:16:01 -0400180 mDescriptorSets.clear();
Jamie Madill8c3988c2017-12-21 14:44:56 -0500181 mUsedDescriptorSetRange.invalidate();
Jamie Madill5547b382017-10-23 18:16:01 -0400182 mDirtyTextures = false;
Jamie Madill5deea722017-02-16 10:44:46 -0500183}
184
Jamie Madill9cf9e872017-06-05 12:59:25 -0400185gl::LinkResult ProgramVk::load(const gl::Context *contextImpl,
186 gl::InfoLog &infoLog,
187 gl::BinaryInputStream *stream)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400188{
189 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500190 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400191}
192
Jamie Madill27a60632017-06-30 15:12:01 -0400193void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400194{
195 UNIMPLEMENTED();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400196}
197
198void ProgramVk::setBinaryRetrievableHint(bool retrievable)
199{
200 UNIMPLEMENTED();
201}
202
Yunchao He61afff12017-03-14 15:34:03 +0800203void ProgramVk::setSeparable(bool separable)
204{
205 UNIMPLEMENTED();
206}
207
Jamie Madill9cf9e872017-06-05 12:59:25 -0400208gl::LinkResult ProgramVk::link(const gl::Context *glContext,
Jamie Madillc9727f32017-11-07 12:37:07 -0500209 const gl::ProgramLinkedResources &resources,
Jamie Madill9cf9e872017-06-05 12:59:25 -0400210 gl::InfoLog &infoLog)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400211{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400212 ContextVk *contextVk = vk::GetImpl(glContext);
Jamie Madillc5143482017-10-15 20:20:06 -0400213 RendererVk *renderer = contextVk->getRenderer();
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500214 GlslangWrapper *glslangWrapper = renderer->getGlslangWrapper();
Jamie Madillc5143482017-10-15 20:20:06 -0400215 VkDevice device = renderer->getDevice();
216
217 reset(device);
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500218
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500219 std::vector<uint32_t> vertexCode;
220 std::vector<uint32_t> fragmentCode;
221 bool linkSuccess = false;
Jamie Madill4dd167f2017-11-09 13:08:31 -0500222 ANGLE_TRY_RESULT(
223 glslangWrapper->linkProgram(glContext, mState, resources, &vertexCode, &fragmentCode),
224 linkSuccess);
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500225 if (!linkSuccess)
226 {
227 return false;
228 }
229
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500230 {
231 VkShaderModuleCreateInfo vertexShaderInfo;
232 vertexShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
233 vertexShaderInfo.pNext = nullptr;
234 vertexShaderInfo.flags = 0;
235 vertexShaderInfo.codeSize = vertexCode.size() * sizeof(uint32_t);
236 vertexShaderInfo.pCode = vertexCode.data();
Jamie Madillc5143482017-10-15 20:20:06 -0400237
238 ANGLE_TRY(mLinkedVertexModule.init(device, vertexShaderInfo));
Jamie Madillf2f6d372018-01-10 21:37:23 -0500239 mVertexModuleSerial = renderer->issueProgramSerial();
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500240 }
241
242 {
243 VkShaderModuleCreateInfo fragmentShaderInfo;
244 fragmentShaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
245 fragmentShaderInfo.pNext = nullptr;
246 fragmentShaderInfo.flags = 0;
247 fragmentShaderInfo.codeSize = fragmentCode.size() * sizeof(uint32_t);
248 fragmentShaderInfo.pCode = fragmentCode.data();
249
Jamie Madillc5143482017-10-15 20:20:06 -0400250 ANGLE_TRY(mLinkedFragmentModule.init(device, fragmentShaderInfo));
Jamie Madillf2f6d372018-01-10 21:37:23 -0500251 mFragmentModuleSerial = renderer->issueProgramSerial();
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500252 }
253
Jamie Madill76e471e2017-10-21 09:56:01 -0400254 ANGLE_TRY(initDescriptorSets(contextVk));
255 ANGLE_TRY(initDefaultUniformBlocks(glContext));
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500256
Jamie Madill8c3988c2017-12-21 14:44:56 -0500257 if (!mState.getSamplerUniformRange().empty())
258 {
259 // Ensure the descriptor set range includes the textures at position 1.
260 mUsedDescriptorSetRange.extend(1);
261 mDirtyTextures = true;
262 }
263
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500264 return true;
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400265}
266
Jamie Madill76e471e2017-10-21 09:56:01 -0400267gl::Error ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
268{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400269 ContextVk *contextVk = vk::GetImpl(glContext);
Jamie Madill57fbfd82018-02-14 12:45:34 -0500270 RendererVk *renderer = contextVk->getRenderer();
Jamie Madill76e471e2017-10-21 09:56:01 -0400271 VkDevice device = contextVk->getDevice();
272
273 // Process vertex and fragment uniforms into std140 packing.
274 std::array<sh::BlockLayoutMap, 2> layoutMap;
275 std::array<size_t, 2> requiredBufferSize = {{0, 0}};
276
277 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
278 {
279 ANGLE_TRY(InitDefaultUniformBlock(glContext, device, GetShader(mState, shaderIndex),
280 &mDefaultUniformBlocks[shaderIndex].storage,
281 &layoutMap[shaderIndex],
282 &requiredBufferSize[shaderIndex]));
283 }
284
285 // Init the default block layout info.
286 const auto &locations = mState.getUniformLocations();
287 const auto &uniforms = mState.getUniforms();
288 for (size_t locationIndex = 0; locationIndex < locations.size(); ++locationIndex)
289 {
290 std::array<sh::BlockMemberInfo, 2> layoutInfo;
291
292 const auto &location = locations[locationIndex];
293 if (location.used() && !location.ignored)
294 {
Jamie Madillde03e002017-10-21 14:04:20 -0400295 const auto &uniform = uniforms[location.index];
296
297 if (uniform.isSampler())
298 continue;
299
Jamie Madill76e471e2017-10-21 09:56:01 -0400300 std::string uniformName = uniform.name;
301 if (uniform.isArray())
302 {
Olli Etuaho1734e172017-10-27 15:30:27 +0300303 uniformName += ArrayString(location.arrayIndex);
Jamie Madill76e471e2017-10-21 09:56:01 -0400304 }
305
306 bool found = false;
307
308 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
309 {
310 auto it = layoutMap[shaderIndex].find(uniformName);
311 if (it != layoutMap[shaderIndex].end())
312 {
313 found = true;
314 layoutInfo[shaderIndex] = it->second;
315 }
316 }
317
318 ASSERT(found);
319 }
320
321 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
322 {
323 mDefaultUniformBlocks[shaderIndex].uniformLayout.push_back(layoutInfo[shaderIndex]);
324 }
325 }
326
327 bool anyDirty = false;
328 bool allDirty = true;
329
330 for (uint32_t shaderIndex = MinShaderIndex; shaderIndex < MaxShaderIndex; ++shaderIndex)
331 {
332 if (requiredBufferSize[shaderIndex] > 0)
333 {
334 if (!mDefaultUniformBlocks[shaderIndex].uniformData.resize(
335 requiredBufferSize[shaderIndex]))
336 {
337 return gl::OutOfMemory() << "Memory allocation failure.";
338 }
339 mDefaultUniformBlocks[shaderIndex].uniformData.fill(0);
340 mDefaultUniformBlocks[shaderIndex].uniformsDirty = true;
341
342 anyDirty = true;
343 }
344 else
345 {
346 allDirty = false;
347 }
348 }
349
350 if (anyDirty)
351 {
352 // Initialize the "empty" uniform block if necessary.
353 if (!allDirty)
354 {
355 VkBufferCreateInfo uniformBufferInfo;
356 uniformBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
357 uniformBufferInfo.pNext = nullptr;
358 uniformBufferInfo.flags = 0;
359 uniformBufferInfo.size = 1;
360 uniformBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
361 uniformBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
362 uniformBufferInfo.queueFamilyIndexCount = 0;
363 uniformBufferInfo.pQueueFamilyIndices = nullptr;
364
365 ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(device, uniformBufferInfo));
366
Jamie Madill57dd97a2018-02-06 17:10:49 -0500367 // Assume host vislble/coherent memory available.
368 VkMemoryPropertyFlags flags =
369 (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Jamie Madill76e471e2017-10-21 09:56:01 -0400370 size_t requiredSize = 0;
Jamie Madill57fbfd82018-02-14 12:45:34 -0500371 ANGLE_TRY(AllocateBufferMemory(renderer, flags, &mEmptyUniformBlockStorage.buffer,
Jamie Madill76e471e2017-10-21 09:56:01 -0400372 &mEmptyUniformBlockStorage.memory, &requiredSize));
373 }
374
375 ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
Jamie Madill8c3988c2017-12-21 14:44:56 -0500376
377 // Ensure the descriptor set range includes the uniform buffers at position 0.
378 mUsedDescriptorSetRange.extend(0);
Jamie Madill5547b382017-10-23 18:16:01 -0400379 }
Jamie Madill76e471e2017-10-21 09:56:01 -0400380
381 return gl::NoError();
382}
383
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400384GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
385{
386 UNIMPLEMENTED();
387 return GLboolean();
388}
389
Jamie Madill76e471e2017-10-21 09:56:01 -0400390template <typename T>
391void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
392{
393 const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
394 const gl::LinkedUniform &linkedUniform = mState.getUniforms()[locationInfo.index];
395
396 if (linkedUniform.type == entryPointType)
397 {
398 for (auto &uniformBlock : mDefaultUniformBlocks)
399 {
400 const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];
401 UpdateDefaultUniformBlock(count, linkedUniform.typeInfo->componentCount, v, layoutInfo,
402 &uniformBlock.uniformData);
403 }
404 }
405 else
406 {
407 ASSERT(linkedUniform.type == gl::VariableBoolVectorType(entryPointType));
408 UNIMPLEMENTED();
409 }
410}
411
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400412void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
413{
Jamie Madill76e471e2017-10-21 09:56:01 -0400414 setUniformImpl(location, count, v, GL_FLOAT);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400415}
416
417void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
418{
Jamie Madill76e471e2017-10-21 09:56:01 -0400419 setUniformImpl(location, count, v, GL_FLOAT_VEC2);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400420}
421
422void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
423{
Jamie Madill76e471e2017-10-21 09:56:01 -0400424 setUniformImpl(location, count, v, GL_FLOAT_VEC3);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400425}
426
427void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
428{
Jamie Madill76e471e2017-10-21 09:56:01 -0400429 setUniformImpl(location, count, v, GL_FLOAT_VEC4);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400430}
431
432void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
433{
434 UNIMPLEMENTED();
435}
436
437void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
438{
439 UNIMPLEMENTED();
440}
441
442void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
443{
444 UNIMPLEMENTED();
445}
446
447void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
448{
449 UNIMPLEMENTED();
450}
451
452void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
453{
454 UNIMPLEMENTED();
455}
456
457void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
458{
459 UNIMPLEMENTED();
460}
461
462void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
463{
464 UNIMPLEMENTED();
465}
466
467void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
468{
469 UNIMPLEMENTED();
470}
471
472void ProgramVk::setUniformMatrix2fv(GLint location,
473 GLsizei count,
474 GLboolean transpose,
475 const GLfloat *value)
476{
477 UNIMPLEMENTED();
478}
479
480void ProgramVk::setUniformMatrix3fv(GLint location,
481 GLsizei count,
482 GLboolean transpose,
483 const GLfloat *value)
484{
485 UNIMPLEMENTED();
486}
487
488void ProgramVk::setUniformMatrix4fv(GLint location,
489 GLsizei count,
490 GLboolean transpose,
491 const GLfloat *value)
492{
493 UNIMPLEMENTED();
494}
495
496void ProgramVk::setUniformMatrix2x3fv(GLint location,
497 GLsizei count,
498 GLboolean transpose,
499 const GLfloat *value)
500{
501 UNIMPLEMENTED();
502}
503
504void ProgramVk::setUniformMatrix3x2fv(GLint location,
505 GLsizei count,
506 GLboolean transpose,
507 const GLfloat *value)
508{
509 UNIMPLEMENTED();
510}
511
512void ProgramVk::setUniformMatrix2x4fv(GLint location,
513 GLsizei count,
514 GLboolean transpose,
515 const GLfloat *value)
516{
517 UNIMPLEMENTED();
518}
519
520void ProgramVk::setUniformMatrix4x2fv(GLint location,
521 GLsizei count,
522 GLboolean transpose,
523 const GLfloat *value)
524{
525 UNIMPLEMENTED();
526}
527
528void ProgramVk::setUniformMatrix3x4fv(GLint location,
529 GLsizei count,
530 GLboolean transpose,
531 const GLfloat *value)
532{
533 UNIMPLEMENTED();
534}
535
536void ProgramVk::setUniformMatrix4x3fv(GLint location,
537 GLsizei count,
538 GLboolean transpose,
539 const GLfloat *value)
540{
541 UNIMPLEMENTED();
542}
543
544void ProgramVk::setUniformBlockBinding(GLuint uniformBlockIndex, GLuint uniformBlockBinding)
545{
546 UNIMPLEMENTED();
547}
548
Sami Väisänen46eaa942016-06-29 10:26:37 +0300549void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
550 GLenum genMode,
551 GLint components,
552 const GLfloat *coeffs)
553{
554 UNIMPLEMENTED();
555}
556
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500557const vk::ShaderModule &ProgramVk::getLinkedVertexModule() const
558{
559 ASSERT(mLinkedVertexModule.getHandle() != VK_NULL_HANDLE);
560 return mLinkedVertexModule;
561}
562
Jamie Madillf2f6d372018-01-10 21:37:23 -0500563Serial ProgramVk::getVertexModuleSerial() const
564{
565 return mVertexModuleSerial;
566}
567
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500568const vk::ShaderModule &ProgramVk::getLinkedFragmentModule() const
569{
570 ASSERT(mLinkedFragmentModule.getHandle() != VK_NULL_HANDLE);
571 return mLinkedFragmentModule;
572}
573
Jamie Madillf2f6d372018-01-10 21:37:23 -0500574Serial ProgramVk::getFragmentModuleSerial() const
575{
576 return mFragmentModuleSerial;
577}
578
Jamie Madill76e471e2017-10-21 09:56:01 -0400579vk::Error ProgramVk::initDescriptorSets(ContextVk *contextVk)
580{
Jamie Madill5547b382017-10-23 18:16:01 -0400581 ASSERT(mDescriptorSets.empty());
Jamie Madill76e471e2017-10-21 09:56:01 -0400582
Jamie Madill8c3988c2017-12-21 14:44:56 -0500583 RendererVk *renderer = contextVk->getRenderer();
Jamie Madill76e471e2017-10-21 09:56:01 -0400584 VkDevice device = contextVk->getDevice();
585
586 // Write out to a new a descriptor set.
587 // TODO(jmadill): Handle descriptor set lifetime.
588 vk::DescriptorPool *descriptorPool = contextVk->getDescriptorPool();
589
Jamie Madill8c3988c2017-12-21 14:44:56 -0500590 const auto &descriptorSetLayouts = renderer->getGraphicsDescriptorSetLayouts();
591
592 uint32_t descriptorSetCount = static_cast<uint32_t>(descriptorSetLayouts.size());
Jamie Madill76e471e2017-10-21 09:56:01 -0400593
Jamie Madill5547b382017-10-23 18:16:01 -0400594 VkDescriptorSetAllocateInfo allocInfo;
595 allocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
596 allocInfo.pNext = nullptr;
597 allocInfo.descriptorPool = descriptorPool->getHandle();
598 allocInfo.descriptorSetCount = descriptorSetCount;
Jamie Madill8c3988c2017-12-21 14:44:56 -0500599 allocInfo.pSetLayouts = descriptorSetLayouts[0].ptr();
Jamie Madill76e471e2017-10-21 09:56:01 -0400600
Jamie Madill5547b382017-10-23 18:16:01 -0400601 mDescriptorSets.resize(descriptorSetCount, VK_NULL_HANDLE);
602 ANGLE_TRY(descriptorPool->allocateDescriptorSets(device, allocInfo, &mDescriptorSets[0]));
Jamie Madill76e471e2017-10-21 09:56:01 -0400603 return vk::NoError();
604}
605
Jamie Madill54164b02017-08-28 15:17:37 -0400606void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
607{
608 UNIMPLEMENTED();
609}
610
611void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
612{
613 UNIMPLEMENTED();
614}
615
616void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
617{
618 UNIMPLEMENTED();
619}
620
Jamie Madill76e471e2017-10-21 09:56:01 -0400621vk::Error ProgramVk::updateUniforms(ContextVk *contextVk)
622{
623 if (!mDefaultUniformBlocks[VertexShader].uniformsDirty &&
624 !mDefaultUniformBlocks[FragmentShader].uniformsDirty)
625 {
626 return vk::NoError();
627 }
628
Jamie Madill8c3988c2017-12-21 14:44:56 -0500629 ASSERT(mUsedDescriptorSetRange.contains(0));
Jamie Madill5547b382017-10-23 18:16:01 -0400630
Jamie Madill76e471e2017-10-21 09:56:01 -0400631 VkDevice device = contextVk->getDevice();
632
633 // Update buffer memory by immediate mapping. This immediate update only works once.
634 // TODO(jmadill): Handle inserting updates into the command stream, or use dynamic buffers.
635 for (auto &uniformBlock : mDefaultUniformBlocks)
636 {
637 if (uniformBlock.uniformsDirty)
638 {
639 ANGLE_TRY(SyncDefaultUniformBlock(device, &uniformBlock.storage.memory,
640 uniformBlock.uniformData));
641 uniformBlock.uniformsDirty = false;
642 }
643 }
644
645 return vk::NoError();
646}
647
648vk::Error ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
649{
650 std::array<VkDescriptorBufferInfo, 2> descriptorBufferInfo;
651 std::array<VkWriteDescriptorSet, 2> writeDescriptorInfo;
652 uint32_t bufferCount = 0;
653
654 for (auto &uniformBlock : mDefaultUniformBlocks)
655 {
656 auto &bufferInfo = descriptorBufferInfo[bufferCount];
657
658 if (!uniformBlock.uniformData.empty())
659 {
660 bufferInfo.buffer = uniformBlock.storage.buffer.getHandle();
661 }
662 else
663 {
664 bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
665 }
666
667 bufferInfo.offset = 0;
668 bufferInfo.range = VK_WHOLE_SIZE;
669
670 auto &writeInfo = writeDescriptorInfo[bufferCount];
671
672 writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
673 writeInfo.pNext = nullptr;
Jamie Madill5547b382017-10-23 18:16:01 -0400674 writeInfo.dstSet = mDescriptorSets[0];
Jamie Madill76e471e2017-10-21 09:56:01 -0400675 writeInfo.dstBinding = bufferCount;
676 writeInfo.dstArrayElement = 0;
677 writeInfo.descriptorCount = 1;
678 writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
679 writeInfo.pImageInfo = nullptr;
680 writeInfo.pBufferInfo = &bufferInfo;
681 writeInfo.pTexelBufferView = nullptr;
682
683 bufferCount++;
684 }
685
686 VkDevice device = contextVk->getDevice();
687
688 vkUpdateDescriptorSets(device, bufferCount, writeDescriptorInfo.data(), 0, nullptr);
689
690 return vk::NoError();
691}
692
Jamie Madill5547b382017-10-23 18:16:01 -0400693const std::vector<VkDescriptorSet> &ProgramVk::getDescriptorSets() const
Jamie Madill76e471e2017-10-21 09:56:01 -0400694{
Jamie Madill5547b382017-10-23 18:16:01 -0400695 return mDescriptorSets;
696}
697
Jamie Madill8c3988c2017-12-21 14:44:56 -0500698const gl::RangeUI &ProgramVk::getUsedDescriptorSetRange() const
Jamie Madill5547b382017-10-23 18:16:01 -0400699{
Jamie Madill8c3988c2017-12-21 14:44:56 -0500700 return mUsedDescriptorSetRange;
Jamie Madill5547b382017-10-23 18:16:01 -0400701}
702
703void ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
704{
705 if (mState.getSamplerBindings().empty() || !mDirtyTextures)
706 {
707 return;
708 }
709
Jamie Madill8c3988c2017-12-21 14:44:56 -0500710 ASSERT(mUsedDescriptorSetRange.contains(1));
711 VkDescriptorSet descriptorSet = mDescriptorSets[1];
Jamie Madill5547b382017-10-23 18:16:01 -0400712
713 // TODO(jmadill): Don't hard-code the texture limit.
714 ShaderTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
715 ShaderTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
716 uint32_t imageCount = 0;
717
718 const gl::State &glState = contextVk->getGLState();
719 const auto &completeTextures = glState.getCompleteTextureCache();
720
721 for (const auto &samplerBinding : mState.getSamplerBindings())
722 {
723 ASSERT(!samplerBinding.unreferenced);
724
725 // TODO(jmadill): Sampler arrays
726 ASSERT(samplerBinding.boundTextureUnits.size() == 1);
727
728 GLuint textureUnit = samplerBinding.boundTextureUnits[0];
729 const gl::Texture *texture = completeTextures[textureUnit];
730
731 // TODO(jmadill): Incomplete textures handling.
732 ASSERT(texture);
733
Jamie Madille1f3ad42017-10-28 23:00:42 -0400734 TextureVk *textureVk = vk::GetImpl(texture);
Jamie Madill5547b382017-10-23 18:16:01 -0400735 const vk::Image &image = textureVk->getImage();
736
737 VkDescriptorImageInfo &imageInfo = descriptorImageInfo[imageCount];
738
739 imageInfo.sampler = textureVk->getSampler().getHandle();
740 imageInfo.imageView = textureVk->getImageView().getHandle();
741 imageInfo.imageLayout = image.getCurrentLayout();
742
743 auto &writeInfo = writeDescriptorInfo[imageCount];
744
745 writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
746 writeInfo.pNext = nullptr;
747 writeInfo.dstSet = descriptorSet;
748 writeInfo.dstBinding = imageCount;
749 writeInfo.dstArrayElement = 0;
750 writeInfo.descriptorCount = 1;
751 writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
752 writeInfo.pImageInfo = &imageInfo;
753 writeInfo.pBufferInfo = nullptr;
754 writeInfo.pTexelBufferView = nullptr;
755
756 imageCount++;
757 }
758
759 VkDevice device = contextVk->getDevice();
760
761 ASSERT(imageCount > 0);
762 vkUpdateDescriptorSets(device, imageCount, writeDescriptorInfo.data(), 0, nullptr);
763
764 mDirtyTextures = false;
765}
766
767void ProgramVk::invalidateTextures()
768{
769 mDirtyTextures = true;
Jamie Madill76e471e2017-10-21 09:56:01 -0400770}
771
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400772} // namespace rx