//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ProgramVk.cpp:
//    Implements the class methods for ProgramVk.
//

#include "libANGLE/renderer/vulkan/ProgramVk.h"

#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/renderer_utils.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"

namespace rx
{

namespace
{

constexpr size_t kUniformBlockDynamicBufferMinSize = 256 * 128;

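// Computes the std140 layout of a shader stage's default uniform block and returns the total
// block size. A size of zero means the stage has no default uniform data to back with a buffer.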
void InitDefaultUniformBlock(const std::vector<sh::Uniform> &uniforms,
                             gl::Shader *shader,
                             sh::BlockLayoutMap *blockLayoutMapOut,
                             size_t *blockSizeOut)
{
    if (uniforms.empty())
    {
        *blockSizeOut = 0;
        return;
    }

    sh::Std140BlockEncoder blockEncoder;
    sh::GetUniformBlockInfo(uniforms, "", &blockEncoder, blockLayoutMapOut);

    size_t blockSize = blockEncoder.getBlockSize();

    // TODO(jmadill): I think we still need a valid block for the pipeline even if zero sized.
    if (blockSize == 0)
    {
        *blockSizeOut = 0;
        return;
    }

    *blockSizeOut = blockSize;
    return;
}

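// Writes client uniform data into the CPU-side shadow copy of a default uniform block,
// respecting the std140 array stride when elements are not tightly packed.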
template <typename T>
void UpdateDefaultUniformBlock(GLsizei count,
                               uint32_t arrayIndex,
                               int componentCount,
                               const T *v,
                               const sh::BlockMemberInfo &layoutInfo,
                               angle::MemoryBuffer *uniformData)
{
    const int elementSize = sizeof(T) * componentCount;

    uint8_t *dst = uniformData->data() + layoutInfo.offset;
    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        uint32_t arrayOffset = arrayIndex * layoutInfo.arrayStride;
        uint8_t *writePtr    = dst + arrayOffset;
        ASSERT(writePtr + (elementSize * count) <= uniformData->data() + uniformData->size());
        memcpy(writePtr, v, elementSize * count);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        int maxIndex = arrayIndex + count;
        for (int writeIndex = arrayIndex, readIndex = 0; writeIndex < maxIndex;
             writeIndex++, readIndex++)
        {
            const int arrayOffset = writeIndex * layoutInfo.arrayStride;
            uint8_t *writePtr     = dst + arrayOffset;
            const T *readPtr      = v + (readIndex * componentCount);
            ASSERT(writePtr + elementSize <= uniformData->data() + uniformData->size());
            memcpy(writePtr, readPtr, elementSize);
        }
    }
}

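// Reads a single element back out of the CPU-side shadow copy; the inverse of the update above.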
template <typename T>
void ReadFromDefaultUniformBlock(int componentCount,
                                 uint32_t arrayIndex,
                                 T *dst,
                                 const sh::BlockMemberInfo &layoutInfo,
                                 const angle::MemoryBuffer *uniformData)
{
    ASSERT(layoutInfo.offset != -1);

    const int elementSize = sizeof(T) * componentCount;
    const uint8_t *source = uniformData->data() + layoutInfo.offset;

    if (layoutInfo.arrayStride == 0 || layoutInfo.arrayStride == elementSize)
    {
        const uint8_t *readPtr = source + arrayIndex * layoutInfo.arrayStride;
        memcpy(dst, readPtr, elementSize);
    }
    else
    {
        // Have to respect the arrayStride between each element of the array.
        const int arrayOffset  = arrayIndex * layoutInfo.arrayStride;
        const uint8_t *readPtr = source + arrayOffset;
        memcpy(dst, readPtr, elementSize);
    }
}

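// Streams the shadow copy into the stage's DynamicBuffer, returning the dynamic offset of the
// written data and reporting whether a new VkBuffer had to be allocated in the process.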
angle::Result SyncDefaultUniformBlock(ContextVk *contextVk,
                                      vk::DynamicBuffer *dynamicBuffer,
                                      const angle::MemoryBuffer &bufferData,
                                      uint32_t *outOffset,
                                      bool *outBufferModified)
{
    ASSERT(!bufferData.empty());
    uint8_t *data       = nullptr;
    VkBuffer *outBuffer = nullptr;
    VkDeviceSize offset = 0;
    ANGLE_TRY(dynamicBuffer->allocate(contextVk, bufferData.size(), &data, outBuffer, &offset,
                                      outBufferModified));
    *outOffset = static_cast<uint32_t>(offset);
    memcpy(data, bufferData.data(), bufferData.size());
    ANGLE_TRY(dynamicBuffer->flush(contextVk));
    return angle::Result::Continue();
}

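// Line primitives are drawn with the Bresenham emulation shaders when the
// basicGLLineRasterization feature is enabled.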
bool UseLineRaster(const ContextVk *contextVk, gl::PrimitiveMode mode)
{
    return contextVk->getFeatures().basicGLLineRasterization && gl::IsLineMode(mode);
}
}  // anonymous namespace

// ProgramVk::ShaderInfo implementation.
ProgramVk::ShaderInfo::ShaderInfo()
{
}

ProgramVk::ShaderInfo::~ShaderInfo() = default;

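// Converts the program's GLSL sources to SPIR-V and wraps the modules in ShaderAndSerial
// objects the first time they are requested; subsequent calls return the cached modules.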
angle::Result ProgramVk::ShaderInfo::getShaders(
    ContextVk *contextVk,
    const std::string &vertexSource,
    const std::string &fragmentSource,
    bool enableLineRasterEmulation,
    const vk::ShaderAndSerial **vertexShaderAndSerialOut,
    const vk::ShaderAndSerial **fragmentShaderAndSerialOut)
{
    if (!valid())
    {
        std::vector<uint32_t> vertexCode;
        std::vector<uint32_t> fragmentCode;
        ANGLE_TRY(GlslangWrapper::GetShaderCode(contextVk, contextVk->getCaps(),
                                                enableLineRasterEmulation, vertexSource,
                                                fragmentSource, &vertexCode, &fragmentCode));

        ANGLE_TRY(vk::InitShaderAndSerial(contextVk, &mVertexShaderAndSerial, vertexCode.data(),
                                          vertexCode.size() * sizeof(uint32_t)));
        ANGLE_TRY(vk::InitShaderAndSerial(contextVk, &mFragmentShaderAndSerial, fragmentCode.data(),
                                          fragmentCode.size() * sizeof(uint32_t)));
    }

    *fragmentShaderAndSerialOut = &mFragmentShaderAndSerial;
    *vertexShaderAndSerialOut   = &mVertexShaderAndSerial;
    return angle::Result::Continue();
}

void ProgramVk::ShaderInfo::destroy(VkDevice device)
{
    mVertexShaderAndSerial.destroy(device);
    mFragmentShaderAndSerial.destroy(device);
}

bool ProgramVk::ShaderInfo::valid() const
{
    return mVertexShaderAndSerial.valid();
}

// ProgramVk implementation.
ProgramVk::DefaultUniformBlock::DefaultUniformBlock()
    : storage(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
              kUniformBlockDynamicBufferMinSize)
{
}

ProgramVk::DefaultUniformBlock::~DefaultUniformBlock() = default;

ProgramVk::ProgramVk(const gl::ProgramState &state) : ProgramImpl(state), mUniformBlocksOffsets{}
{
    mUsedDescriptorSetRange.invalidate();
}

ProgramVk::~ProgramVk() = default;

void ProgramVk::destroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);

    // We don't interrupt execution in destructors.
    (void)reset(contextVk);
}

angle::Result ProgramVk::reset(ContextVk *contextVk)
{
    VkDevice device = contextVk->getDevice();

    for (auto &descriptorSetLayout : mDescriptorSetLayouts)
    {
        descriptorSetLayout.reset();
    }
    mPipelineLayout.reset();

    RendererVk *renderer = contextVk->getRenderer();
    for (auto &uniformBlock : mDefaultUniformBlocks)
    {
        uniformBlock.storage.release(renderer);
    }

    mDefaultShaderInfo.destroy(device);
    mLineRasterShaderInfo.destroy(device);

    Serial currentSerial = renderer->getCurrentQueueSerial();
    renderer->releaseObject(currentSerial, &mEmptyUniformBlockStorage.memory);
    renderer->releaseObject(currentSerial, &mEmptyUniformBlockStorage.buffer);

    mDescriptorSets.clear();
    mUsedDescriptorSetRange.invalidate();

    for (vk::SharedDescriptorPoolBinding &binding : mDescriptorPoolBindings)
    {
        binding.reset();
    }

    return angle::Result::Continue();
}

angle::Result ProgramVk::load(const gl::Context *context,
                              gl::InfoLog &infoLog,
                              gl::BinaryInputStream *stream)
{
    UNIMPLEMENTED();
    return angle::Result::Stop();
}

void ProgramVk::save(const gl::Context *context, gl::BinaryOutputStream *stream)
{
    UNIMPLEMENTED();
}

void ProgramVk::setBinaryRetrievableHint(bool retrievable)
{
    UNIMPLEMENTED();
}

void ProgramVk::setSeparable(bool separable)
{
    UNIMPLEMENTED();
}

std::unique_ptr<LinkEvent> ProgramVk::link(const gl::Context *context,
                                           const gl::ProgramLinkedResources &resources,
                                           gl::InfoLog &infoLog)
{
    // TODO(jie.a.chen@intel.com): Parallelize linking.
    // http://crbug.com/849576
    return std::make_unique<LinkEventDone>(linkImpl(context, resources, infoLog));
}

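// linkImpl creates the Vulkan objects that back the linked program: the std140 shadow buffers
// for the default uniform blocks, the descriptor set layouts for uniforms, textures and driver
// uniforms, and the pipeline layout built from those layouts.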
angle::Result ProgramVk::linkImpl(const gl::Context *glContext,
                                  const gl::ProgramLinkedResources &resources,
                                  gl::InfoLog &infoLog)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();

    ANGLE_TRY(reset(contextVk));

    GlslangWrapper::GetShaderSource(mState, resources, &mVertexSource, &mFragmentSource);

    ANGLE_TRY(initDefaultUniformBlocks(glContext));

    if (!mState.getSamplerUniformRange().empty())
    {
        // Ensure the descriptor set range includes the textures at position 1.
        mUsedDescriptorSetRange.extend(kTextureDescriptorSetIndex);
    }

    // Store a reference to the pipeline and descriptor set layouts. This will create them if
    // they don't already exist in the cache.
    vk::DescriptorSetLayoutDesc uniformsSetDesc;
    uniformsSetDesc.update(kVertexUniformsBindingIndex, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                           1);
    uniformsSetDesc.update(kFragmentUniformsBindingIndex, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                           1);

    ANGLE_TRY(renderer->getDescriptorSetLayout(
        contextVk, uniformsSetDesc, &mDescriptorSetLayouts[kUniformsDescriptorSetIndex]));

    vk::DescriptorSetLayoutDesc texturesSetDesc;

    for (uint32_t textureIndex = 0; textureIndex < mState.getSamplerBindings().size();
         ++textureIndex)
    {
        const gl::SamplerBinding &samplerBinding = mState.getSamplerBindings()[textureIndex];

        // The front-end always binds array sampler units sequentially.
        const uint32_t count = static_cast<uint32_t>(samplerBinding.boundTextureUnits.size());
        texturesSetDesc.update(textureIndex, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, count);
    }

    ANGLE_TRY(renderer->getDescriptorSetLayout(contextVk, texturesSetDesc,
                                               &mDescriptorSetLayouts[kTextureDescriptorSetIndex]));

    vk::DescriptorSetLayoutDesc driverUniformsSetDesc;
    driverUniformsSetDesc.update(0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1);
    ANGLE_TRY(renderer->getDescriptorSetLayout(
        contextVk, driverUniformsSetDesc,
        &mDescriptorSetLayouts[kDriverUniformsDescriptorSetIndex]));

    vk::PipelineLayoutDesc pipelineLayoutDesc;
    pipelineLayoutDesc.updateDescriptorSetLayout(kUniformsDescriptorSetIndex, uniformsSetDesc);
    pipelineLayoutDesc.updateDescriptorSetLayout(kTextureDescriptorSetIndex, texturesSetDesc);
    pipelineLayoutDesc.updateDescriptorSetLayout(kDriverUniformsDescriptorSetIndex,
                                                 driverUniformsSetDesc);

    ANGLE_TRY(renderer->getPipelineLayout(contextVk, pipelineLayoutDesc, mDescriptorSetLayouts,
                                          &mPipelineLayout));

    if (!mState.getUniforms().empty())
    {
        const gl::RangeUI &samplerRange = mState.getSamplerUniformRange();

        if (mState.getUniforms().size() > samplerRange.length())
        {
            // Ensure the descriptor set range includes the uniform buffers at position 0.
            mUsedDescriptorSetRange.extend(kUniformsDescriptorSetIndex);
        }

        if (!samplerRange.empty())
        {
            // Ensure the descriptor set range includes the textures at position 1.
            mUsedDescriptorSetRange.extend(kTextureDescriptorSetIndex);
        }
    }

    return angle::Result::Continue();
}

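// initDefaultUniformBlocks computes the std140 layout for each stage's default uniform block,
// records per-location layout info, sizes the CPU shadow buffers, and creates a one-byte
// "empty" buffer so stages without default uniforms still have a valid buffer to bind.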
angle::Result ProgramVk::initDefaultUniformBlocks(const gl::Context *glContext)
{
    ContextVk *contextVk = vk::GetImpl(glContext);
    RendererVk *renderer = contextVk->getRenderer();

    // Process vertex and fragment uniforms into std140 packing.
    vk::ShaderMap<sh::BlockLayoutMap> layoutMap;
    vk::ShaderMap<size_t> requiredBufferSize;
    requiredBufferSize.fill(0);

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        gl::ShaderType glShaderType = static_cast<gl::ShaderType>(shaderType);
        gl::Shader *shader          = mState.getAttachedShader(glShaderType);
        const std::vector<sh::Uniform> &uniforms = shader->getUniforms();
        InitDefaultUniformBlock(uniforms, shader, &layoutMap[shaderType],
                                &requiredBufferSize[shaderType]);
    }

    // Init the default block layout info.
    const auto &uniforms = mState.getUniforms();
    for (const gl::VariableLocation &location : mState.getUniformLocations())
    {
        vk::ShaderMap<sh::BlockMemberInfo> layoutInfo;

        if (location.used() && !location.ignored)
        {
            const auto &uniform = uniforms[location.index];
            if (!uniform.isSampler())
            {
                std::string uniformName = uniform.name;
                if (uniform.isArray())
                {
                    // Gets the uniform name without the [0] at the end.
                    uniformName = gl::ParseResourceName(uniformName, nullptr);
                }

                bool found = false;

                for (vk::ShaderType shaderType : vk::AllShaderTypes())
                {
                    auto it = layoutMap[shaderType].find(uniformName);
                    if (it != layoutMap[shaderType].end())
                    {
                        found                  = true;
                        layoutInfo[shaderType] = it->second;
                    }
                }

                ASSERT(found);
            }
        }

        for (vk::ShaderType shaderType : vk::AllShaderTypes())
        {
            mDefaultUniformBlocks[shaderType].uniformLayout.push_back(layoutInfo[shaderType]);
        }
    }

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        if (requiredBufferSize[shaderType] > 0)
        {
            if (!mDefaultUniformBlocks[shaderType].uniformData.resize(
                    requiredBufferSize[shaderType]))
            {
                ANGLE_VK_CHECK(contextVk, false, VK_ERROR_OUT_OF_HOST_MEMORY);
            }
            size_t minAlignment = static_cast<size_t>(
                renderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);

            mDefaultUniformBlocks[shaderType].storage.init(minAlignment, renderer);

            // Initialize uniform buffer memory to zero by default.
            mDefaultUniformBlocks[shaderType].uniformData.fill(0);
            mDefaultUniformBlocksDirty.set(shaderType);
        }
    }

    if (mDefaultUniformBlocksDirty.any())
    {
        // Initialize the "empty" uniform block if necessary.
        if (!mDefaultUniformBlocksDirty.all())
        {
            VkBufferCreateInfo uniformBufferInfo    = {};
            uniformBufferInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            uniformBufferInfo.flags                 = 0;
            uniformBufferInfo.size                  = 1;
            uniformBufferInfo.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            uniformBufferInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
            uniformBufferInfo.queueFamilyIndexCount = 0;
            uniformBufferInfo.pQueueFamilyIndices   = nullptr;

            ANGLE_TRY(mEmptyUniformBlockStorage.buffer.init(contextVk, uniformBufferInfo));

            // Assume host visible/coherent memory available.
            VkMemoryPropertyFlags flags =
                (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            VkMemoryPropertyFlags flagsOut = 0;
            ANGLE_TRY(AllocateBufferMemory(contextVk, flags, &flagsOut,
                                           &mEmptyUniformBlockStorage.buffer,
                                           &mEmptyUniformBlockStorage.memory));
        }
    }

    return angle::Result::Continue();
}

GLboolean ProgramVk::validate(const gl::Caps &caps, gl::InfoLog *infoLog)
{
    // No-op. The spec is very vague about the behavior of validation.
    return GL_TRUE;
}

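// setUniformImpl writes a glUniform* call into every stage's shadow copy that uses the
// location. Boolean uniforms arrive as float/int data and are converted element by element;
// all other types are copied with the std140 array stride.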
template <typename T>
void ProgramVk::setUniformImpl(GLint location, GLsizei count, const T *v, GLenum entryPointType)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    if (linkedUniform.isSampler())
    {
        // We could potentially cache some indexing here. For now this is a no-op since the
        // mapping is handled entirely in ContextVk.
        return;
    }

    if (linkedUniform.typeInfo->type == entryPointType)
    {
        for (vk::ShaderType shaderType : vk::AllShaderTypes())
        {
            DefaultUniformBlock &uniformBlock     = mDefaultUniformBlocks[shaderType];
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;
            UpdateDefaultUniformBlock(count, locationInfo.arrayIndex, componentCount, v, layoutInfo,
                                      &uniformBlock.uniformData);
            mDefaultUniformBlocksDirty.set(shaderType);
        }
    }
    else
    {
        for (vk::ShaderType shaderType : vk::AllShaderTypes())
        {
            DefaultUniformBlock &uniformBlock     = mDefaultUniformBlocks[shaderType];
            const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

            // Assume an offset of -1 means the block is unused.
            if (layoutInfo.offset == -1)
            {
                continue;
            }

            const GLint componentCount = linkedUniform.typeInfo->componentCount;

            ASSERT(linkedUniform.typeInfo->type == gl::VariableBoolVectorType(entryPointType));

            GLint initialArrayOffset =
                locationInfo.arrayIndex * layoutInfo.arrayStride + layoutInfo.offset;
            for (GLint i = 0; i < count; i++)
            {
                GLint elementOffset = i * layoutInfo.arrayStride + initialArrayOffset;
                GLint *dest =
                    reinterpret_cast<GLint *>(uniformBlock.uniformData.data() + elementOffset);
                const T *source = v + i * componentCount;

                for (int c = 0; c < componentCount; c++)
                {
                    dest[c] = (source[c] == static_cast<T>(0)) ? GL_FALSE : GL_TRUE;
                }
            }

            mDefaultUniformBlocksDirty.set(shaderType);
        }
    }
}

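// getUniformImpl reads uniform data back from the shadow copy of the first stage where the
// uniform is active; matrix types are unpacked through GetMatrixUniform.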
template <typename T>
void ProgramVk::getUniformImpl(GLint location, T *v, GLenum entryPointType) const
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    ASSERT(!linkedUniform.isSampler());

    const gl::ShaderType shaderType = linkedUniform.getFirstShaderTypeWhereActive();
    ASSERT(shaderType != gl::ShaderType::InvalidEnum);

    const DefaultUniformBlock &uniformBlock =
        mDefaultUniformBlocks[static_cast<vk::ShaderType>(shaderType)];
    const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

    ASSERT(linkedUniform.typeInfo->componentType == entryPointType ||
           linkedUniform.typeInfo->componentType == gl::VariableBoolVectorType(entryPointType));

    if (gl::IsMatrixType(linkedUniform.type))
    {
        const uint8_t *ptrToElement = uniformBlock.uniformData.data() + layoutInfo.offset +
                                      (locationInfo.arrayIndex * layoutInfo.arrayStride);
        GetMatrixUniform(linkedUniform.type, v, reinterpret_cast<const T *>(ptrToElement), false);
    }
    else
    {
        ReadFromDefaultUniformBlock(linkedUniform.typeInfo->componentCount, locationInfo.arrayIndex,
                                    v, layoutInfo, &uniformBlock.uniformData);
    }
}

void ProgramVk::setUniform1fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT);
}

void ProgramVk::setUniform2fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC2);
}

void ProgramVk::setUniform3fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC3);
}

void ProgramVk::setUniform4fv(GLint location, GLsizei count, const GLfloat *v)
{
    setUniformImpl(location, count, v, GL_FLOAT_VEC4);
}

void ProgramVk::setUniform1iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT);
}

void ProgramVk::setUniform2iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC2);
}

void ProgramVk::setUniform3iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC3);
}

void ProgramVk::setUniform4iv(GLint location, GLsizei count, const GLint *v)
{
    setUniformImpl(location, count, v, GL_INT_VEC4);
}

void ProgramVk::setUniform1uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform2uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform3uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

void ProgramVk::setUniform4uiv(GLint location, GLsizei count, const GLuint *v)
{
    UNIMPLEMENTED();
}

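// setUniformMatrixfv forwards all glUniformMatrix* entry points to SetFloatUniformMatrix and
// only marks a stage's shadow copy dirty when the data actually changed.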
template <int cols, int rows>
void ProgramVk::setUniformMatrixfv(GLint location,
                                   GLsizei count,
                                   GLboolean transpose,
                                   const GLfloat *value)
{
    const gl::VariableLocation &locationInfo = mState.getUniformLocations()[location];
    const gl::LinkedUniform &linkedUniform   = mState.getUniforms()[locationInfo.index];

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        DefaultUniformBlock &uniformBlock     = mDefaultUniformBlocks[shaderType];
        const sh::BlockMemberInfo &layoutInfo = uniformBlock.uniformLayout[location];

        // Assume an offset of -1 means the block is unused.
        if (layoutInfo.offset == -1)
        {
            continue;
        }

        bool updated = SetFloatUniformMatrix<cols, rows>(
            locationInfo.arrayIndex, linkedUniform.getArraySizeProduct(), count, transpose, value,
            uniformBlock.uniformData.data() + layoutInfo.offset);

        // If the uniformsDirty flag was true, we don't want to flip it to false here if the
        // setter did not update any data. We still want the uniform to be included when we
        // update the descriptor sets.
        if (updated)
        {
            mDefaultUniformBlocksDirty.set(shaderType);
        }
    }
}

void ProgramVk::setUniformMatrix2fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<2, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<3, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4fv(GLint location,
                                    GLsizei count,
                                    GLboolean transpose,
                                    const GLfloat *value)
{
    setUniformMatrixfv<4, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 3>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix2x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<2, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x2fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 2>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix3x4fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<3, 4>(location, count, transpose, value);
}

void ProgramVk::setUniformMatrix4x3fv(GLint location,
                                      GLsizei count,
                                      GLboolean transpose,
                                      const GLfloat *value)
{
    setUniformMatrixfv<4, 3>(location, count, transpose, value);
}

void ProgramVk::setPathFragmentInputGen(const std::string &inputName,
                                        GLenum genMode,
                                        GLint components,
                                        const GLfloat *coeffs)
{
    UNIMPLEMENTED();
}

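// initShaders picks between the default shaders and the line-rasterization-emulation variants
// based on the primitive mode, compiling whichever set is needed on first use, and returns the
// pipeline layout along with them.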
angle::Result ProgramVk::initShaders(ContextVk *contextVk,
                                     gl::PrimitiveMode mode,
                                     const vk::ShaderAndSerial **vertexShaderAndSerialOut,
                                     const vk::ShaderAndSerial **fragmentShaderAndSerialOut,
                                     const vk::PipelineLayout **pipelineLayoutOut)
{
    if (UseLineRaster(contextVk, mode))
    {
        ANGLE_TRY(mLineRasterShaderInfo.getShaders(contextVk, mVertexSource, mFragmentSource, true,
                                                   vertexShaderAndSerialOut,
                                                   fragmentShaderAndSerialOut));
        ASSERT(mLineRasterShaderInfo.valid());
    }
    else
    {
        ANGLE_TRY(mDefaultShaderInfo.getShaders(contextVk, mVertexSource, mFragmentSource, false,
                                                vertexShaderAndSerialOut,
                                                fragmentShaderAndSerialOut));
        ASSERT(mDefaultShaderInfo.valid());
    }

    *pipelineLayoutOut = &mPipelineLayout.get();

    return angle::Result::Continue();
}

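// allocateDescriptorSet grabs a fresh descriptor set for the given index from the context's
// dynamic descriptor pool, growing the local descriptor set array as needed.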
angle::Result ProgramVk::allocateDescriptorSet(ContextVk *contextVk, uint32_t descriptorSetIndex)
{
    // Write out to a new descriptor set.
    vk::DynamicDescriptorPool *dynamicDescriptorPool =
        contextVk->getDynamicDescriptorPool(descriptorSetIndex);

    uint32_t potentialNewCount = descriptorSetIndex + 1;
    if (potentialNewCount > mDescriptorSets.size())
    {
        mDescriptorSets.resize(potentialNewCount, VK_NULL_HANDLE);
    }

    const vk::DescriptorSetLayout &descriptorSetLayout =
        mDescriptorSetLayouts[descriptorSetIndex].get();
    ANGLE_TRY(dynamicDescriptorPool->allocateSets(contextVk, descriptorSetLayout.ptr(), 1,
                                                  &mDescriptorPoolBindings[descriptorSetIndex],
                                                  &mDescriptorSets[descriptorSetIndex]));
    return angle::Result::Continue();
}

void ProgramVk::getUniformfv(const gl::Context *context, GLint location, GLfloat *params) const
{
    getUniformImpl(location, params, GL_FLOAT);
}

void ProgramVk::getUniformiv(const gl::Context *context, GLint location, GLint *params) const
{
    getUniformImpl(location, params, GL_INT);
}

void ProgramVk::getUniformuiv(const gl::Context *context, GLint location, GLuint *params) const
{
    UNIMPLEMENTED();
}

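// updateUniforms streams every dirty shadow copy into its DynamicBuffer. If any stream had to
// allocate a new buffer, the uniforms descriptor set is re-allocated and re-written, since
// descriptor sets are not modified once initialized.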
angle::Result ProgramVk::updateUniforms(ContextVk *contextVk)
{
    ASSERT(dirtyUniforms());

    // Update buffer memory by immediate mapping. This immediate update only works once.
    bool anyNewBufferAllocated = false;
    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        DefaultUniformBlock &uniformBlock = mDefaultUniformBlocks[shaderType];

        if (mDefaultUniformBlocksDirty[shaderType])
        {
            bool bufferModified = false;
            ANGLE_TRY(SyncDefaultUniformBlock(contextVk, &uniformBlock.storage,
                                              uniformBlock.uniformData,
                                              &mUniformBlocksOffsets[shaderType], &bufferModified));
            mDefaultUniformBlocksDirty.reset(shaderType);

            if (bufferModified)
            {
                anyNewBufferAllocated = true;
            }
        }
    }

    if (anyNewBufferAllocated)
    {
        // We need to reinitialize the descriptor sets if we newly allocated buffers since we
        // can't modify the descriptor sets once initialized.
        ANGLE_TRY(allocateDescriptorSet(contextVk, kUniformsDescriptorSetIndex));
        ANGLE_TRY(updateDefaultUniformsDescriptorSet(contextVk));
    }

    return angle::Result::Continue();
}

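// Writes one dynamic uniform-buffer descriptor per stage into the uniforms descriptor set;
// stages with no default uniform data point at the shared "empty" buffer instead.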
angle::Result ProgramVk::updateDefaultUniformsDescriptorSet(ContextVk *contextVk)
{
    vk::ShaderMap<VkDescriptorBufferInfo> descriptorBufferInfo;
    vk::ShaderMap<VkWriteDescriptorSet> writeDescriptorInfo;

    for (vk::ShaderType shaderType : vk::AllShaderTypes())
    {
        DefaultUniformBlock &uniformBlock  = mDefaultUniformBlocks[shaderType];
        VkDescriptorBufferInfo &bufferInfo = descriptorBufferInfo[shaderType];
        VkWriteDescriptorSet &writeInfo    = writeDescriptorInfo[shaderType];

        if (!uniformBlock.uniformData.empty())
        {
            bufferInfo.buffer = uniformBlock.storage.getCurrentBufferHandle();
        }
        else
        {
            bufferInfo.buffer = mEmptyUniformBlockStorage.buffer.getHandle();
        }

        bufferInfo.offset = 0;
        bufferInfo.range  = VK_WHOLE_SIZE;

        writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writeInfo.pNext            = nullptr;
        writeInfo.dstSet           = mDescriptorSets[0];
        writeInfo.dstBinding       = static_cast<uint32_t>(shaderType);
        writeInfo.dstArrayElement  = 0;
        writeInfo.descriptorCount  = 1;
        writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        writeInfo.pImageInfo       = nullptr;
        writeInfo.pBufferInfo      = &bufferInfo;
        writeInfo.pTexelBufferView = nullptr;
    }

    VkDevice device = contextVk->getDevice();

    vkUpdateDescriptorSets(device, 2, writeDescriptorInfo.data(), 0, nullptr);

    return angle::Result::Continue();
}

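// Allocates and fills the textures descriptor set with one combined image sampler per bound
// texture unit, pulling image views and samplers from the context's active textures.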
angle::Result ProgramVk::updateTexturesDescriptorSet(ContextVk *contextVk)
{
    ASSERT(hasTextures());
    ANGLE_TRY(allocateDescriptorSet(contextVk, kTextureDescriptorSetIndex));

    ASSERT(mUsedDescriptorSetRange.contains(1));
    VkDescriptorSet descriptorSet = mDescriptorSets[kTextureDescriptorSetIndex];

    gl::ActiveTextureArray<VkDescriptorImageInfo> descriptorImageInfo;
    gl::ActiveTextureArray<VkWriteDescriptorSet> writeDescriptorInfo;
    uint32_t writeCount = 0;

    const gl::ActiveTextureArray<TextureVk *> &activeTextures = contextVk->getActiveTextures();

    for (uint32_t textureIndex = 0; textureIndex < mState.getSamplerBindings().size();
         ++textureIndex)
    {
        const gl::SamplerBinding &samplerBinding = mState.getSamplerBindings()[textureIndex];

        ASSERT(!samplerBinding.unreferenced);

        for (uint32_t arrayElement = 0; arrayElement < samplerBinding.boundTextureUnits.size();
             ++arrayElement)
        {
            GLuint textureUnit           = samplerBinding.boundTextureUnits[arrayElement];
            TextureVk *textureVk         = activeTextures[textureUnit];
            const vk::ImageHelper &image = textureVk->getImage();

            VkDescriptorImageInfo &imageInfo = descriptorImageInfo[writeCount];

            imageInfo.sampler     = textureVk->getSampler().getHandle();
            imageInfo.imageView   = textureVk->getImageView().getHandle();
            imageInfo.imageLayout = image.getCurrentLayout();

            VkWriteDescriptorSet &writeInfo = writeDescriptorInfo[writeCount];

            writeInfo.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            writeInfo.pNext            = nullptr;
            writeInfo.dstSet           = descriptorSet;
            writeInfo.dstBinding       = textureIndex;
            writeInfo.dstArrayElement  = arrayElement;
            writeInfo.descriptorCount  = 1;
            writeInfo.descriptorType   = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            writeInfo.pImageInfo       = &imageInfo;
            writeInfo.pBufferInfo      = nullptr;
            writeInfo.pTexelBufferView = nullptr;

            writeCount++;
        }
    }

    VkDevice device = contextVk->getDevice();

    ASSERT(writeCount > 0);
    vkUpdateDescriptorSets(device, writeCount, writeDescriptorInfo.data(), 0, nullptr);

    return angle::Result::Continue();
}

void ProgramVk::setDefaultUniformBlocksMinSizeForTesting(size_t minSize)
{
    for (DefaultUniformBlock &block : mDefaultUniformBlocks)
    {
        block.storage.setMinimumSizeForTesting(minSize);
    }
}

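// Binds the program's in-use descriptor sets for the draw, passing the per-stage dynamic
// offsets only when the uniforms set is actually part of the used range.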
angle::Result ProgramVk::updateDescriptorSets(ContextVk *contextVk,
                                              vk::CommandBuffer *commandBuffer)
{
    // Can probably use better dirty bits here.

    if (mUsedDescriptorSetRange.empty())
        return angle::Result::Continue();

    ASSERT(!mDescriptorSets.empty());

    unsigned int low = mUsedDescriptorSetRange.low();

    // No uniforms descriptor set means no need to specify dynamic buffer offsets.
    if (mUsedDescriptorSetRange.contains(kUniformsDescriptorSetIndex))
    {
        commandBuffer->bindDescriptorSets(
            VK_PIPELINE_BIND_POINT_GRAPHICS, mPipelineLayout.get(), low,
            mUsedDescriptorSetRange.length(), &mDescriptorSets[low],
            static_cast<uint32_t>(mUniformBlocksOffsets.size()), mUniformBlocksOffsets.data());
    }
    else
    {
        commandBuffer->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, mPipelineLayout.get(),
                                          low, mUsedDescriptorSetRange.length(),
                                          &mDescriptorSets[low], 0, nullptr);
    }

    return angle::Result::Continue();
}
}  // namespace rx