blob: 74c4d6df1a5522183c312f0a1149987d09f4b3ed [file] [log] [blame]
/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkDescriptorSetManager.h"
egdaniela95220d2016-07-21 11:50:37 -07009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/vk/GrVkDescriptorPool.h"
11#include "src/gpu/vk/GrVkDescriptorSet.h"
12#include "src/gpu/vk/GrVkGpu.h"
13#include "src/gpu/vk/GrVkUniformHandler.h"
egdaniela95220d2016-07-21 11:50:37 -070014
Ben Wagner6c30e742019-02-06 10:46:14 -050015#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
16#include <sanitizer/lsan_interface.h>
17#endif
18
Greg Daniel18f96022017-05-04 15:09:03 -040019GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
Ethan Nicholas0be34802019-08-15 12:36:58 -040020 SkSTArray<1, uint32_t> visibilities;
21 uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
Greg Daniel18f96022017-05-04 15:09:03 -040022 if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
Ethan Nicholas0be34802019-08-15 12:36:58 -040023 stages |= kGeometry_GrShaderFlag;
Greg Daniel18f96022017-05-04 15:09:03 -040024 }
Ethan Nicholas0be34802019-08-15 12:36:58 -040025 visibilities.push_back(stages);
Greg Daniel7a82edf2018-12-04 10:54:34 -050026
27 SkTArray<const GrVkSampler*> samplers;
28 return new GrVkDescriptorSetManager(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities,
29 samplers);
Greg Daniel18f96022017-05-04 15:09:03 -040030}
31
32GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
33 GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
34 SkSTArray<4, uint32_t> visibilities;
Greg Daniel7a82edf2018-12-04 10:54:34 -050035 SkSTArray<4, const GrVkSampler*> immutableSamplers;
Brian Salomon662ea4b2018-07-12 14:53:49 -040036 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
37 for (int i = 0 ; i < uniformHandler.numSamplers(); ++i) {
38 visibilities.push_back(uniformHandler.samplerVisibility(i));
Greg Daniel7a82edf2018-12-04 10:54:34 -050039 immutableSamplers.push_back(uniformHandler.immutableSampler(i));
egdaniel707bbd62016-07-26 07:19:47 -070040 }
Greg Daniel7a82edf2018-12-04 10:54:34 -050041 return new GrVkDescriptorSetManager(gpu, type, visibilities, immutableSamplers);
Greg Daniel18f96022017-05-04 15:09:03 -040042}
43
44GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
45 GrVkGpu* gpu, VkDescriptorType type, const SkTArray<uint32_t>& visibilities) {
Greg Daniel7a82edf2018-12-04 10:54:34 -050046 SkSTArray<4, const GrVkSampler*> immutableSamplers;
47 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
48 for (int i = 0 ; i < visibilities.count(); ++i) {
49 immutableSamplers.push_back(nullptr);
50 }
51 return new GrVkDescriptorSetManager(gpu, type, visibilities, immutableSamplers);
egdaniela95220d2016-07-21 11:50:37 -070052}
53
Greg Daniel7a82edf2018-12-04 10:54:34 -050054GrVkDescriptorSetManager::GrVkDescriptorSetManager(
55 GrVkGpu* gpu, VkDescriptorType type,
56 const SkTArray<uint32_t>& visibilities,
57 const SkTArray<const GrVkSampler*>& immutableSamplers)
58 : fPoolManager(type, gpu, visibilities, immutableSamplers) {
59#ifdef SK_DEBUG
60 if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
61 SkASSERT(visibilities.count() == immutableSamplers.count());
62 } else {
63 SkASSERT(immutableSamplers.count() == 0);
64 }
65#endif
Greg Daniel18f96022017-05-04 15:09:03 -040066 for (int i = 0; i < visibilities.count(); ++i) {
67 fBindingVisibilities.push_back(visibilities[i]);
egdaniel4d866df2016-08-25 13:52:00 -070068 }
Greg Daniel7a82edf2018-12-04 10:54:34 -050069 for (int i = 0; i < immutableSamplers.count(); ++i) {
70 const GrVkSampler* sampler = immutableSamplers[i];
71 if (sampler) {
72 sampler->ref();
73 }
74 fImmutableSamplers.push_back(sampler);
75 }
egdaniel4d866df2016-08-25 13:52:00 -070076}
77
egdaniela95220d2016-07-21 11:50:37 -070078const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
79 const Handle& handle) {
80 const GrVkDescriptorSet* ds = nullptr;
81 int count = fFreeSets.count();
82 if (count > 0) {
83 ds = fFreeSets[count - 1];
84 fFreeSets.removeShuffle(count - 1);
85 } else {
86 VkDescriptorSet vkDS;
87 fPoolManager.getNewDescriptorSet(gpu, &vkDS);
88
89 ds = new GrVkDescriptorSet(vkDS, fPoolManager.fPool, handle);
90 }
91 SkASSERT(ds);
92 return ds;
93}
94
95void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
96 SkASSERT(descSet);
97 fFreeSets.push_back(descSet);
98}
99
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500100void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700101 fPoolManager.freeGPUResources(gpu);
102
103 for (int i = 0; i < fFreeSets.count(); ++i) {
104 fFreeSets[i]->unref(gpu);
105 }
106 fFreeSets.reset();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500107
108 for (int i = 0; i < fImmutableSamplers.count(); ++i) {
109 if (fImmutableSamplers[i]) {
110 fImmutableSamplers[i]->unref(gpu);
111 }
112 }
113 fImmutableSamplers.reset();
egdaniela95220d2016-07-21 11:50:37 -0700114}
115
116void GrVkDescriptorSetManager::abandon() {
117 fPoolManager.abandonGPUResources();
118
119 for (int i = 0; i < fFreeSets.count(); ++i) {
120 fFreeSets[i]->unrefAndAbandon();
121 }
122 fFreeSets.reset();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500123
124 for (int i = 0; i < fImmutableSamplers.count(); ++i) {
125 if (fImmutableSamplers[i]) {
126 fImmutableSamplers[i]->unrefAndAbandon();
127 }
128 }
129 fImmutableSamplers.reset();
egdaniela95220d2016-07-21 11:50:37 -0700130}
131
egdaniel707bbd62016-07-26 07:19:47 -0700132bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
133 const GrVkUniformHandler* uniHandler) const {
134 SkASSERT(uniHandler);
135 if (type != fPoolManager.fDescType) {
136 return false;
137 }
138
Brian Salomon662ea4b2018-07-12 14:53:49 -0400139 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
140 if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
141 return false;
142 }
143 for (int i = 0; i < uniHandler->numSamplers(); ++i) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500144 if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
145 uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
egdaniel707bbd62016-07-26 07:19:47 -0700146 return false;
147 }
egdaniel707bbd62016-07-26 07:19:47 -0700148 }
149 return true;
150}
151
egdaniel4d866df2016-08-25 13:52:00 -0700152bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
153 const SkTArray<uint32_t>& visibilities) const {
154 if (type != fPoolManager.fDescType) {
155 return false;
156 }
157
Greg Daniela7543782017-05-02 14:01:43 -0400158 if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
159 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
egdaniel4d866df2016-08-25 13:52:00 -0700160 if (fBindingVisibilities.count() != visibilities.count()) {
161 return false;
162 }
163 for (int i = 0; i < visibilities.count(); ++i) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500164 if (visibilities[i] != fBindingVisibilities[i] || fImmutableSamplers[i] != nullptr) {
egdaniel4d866df2016-08-25 13:52:00 -0700165 return false;
166 }
167 }
168 }
169 return true;
170}
171
egdaniela95220d2016-07-21 11:50:37 -0700172////////////////////////////////////////////////////////////////////////////////
173
egdaniel707bbd62016-07-26 07:19:47 -0700174VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
175 VkShaderStageFlags flags = 0;
176
177 if (visibility & kVertex_GrShaderFlag) {
178 flags |= VK_SHADER_STAGE_VERTEX_BIT;
179 }
180 if (visibility & kGeometry_GrShaderFlag) {
181 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
182 }
183 if (visibility & kFragment_GrShaderFlag) {
184 flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
185 }
186 return flags;
187}
188
// Builds the VkDescriptorSetLayout for this manager's descriptor type and
// creates the initial descriptor pool.
//
// Two layout shapes are produced:
//  - sampler / uniform-texel-buffer types: one binding per entry in
//    `visibilities`, with immutable samplers wired in for combined-image
//    samplers;
//  - uniform-buffer type: a single binding at
//    GrVkUniformHandler::kUniformBinding.
GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorType type,
        GrVkGpu* gpu,
        const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers)
    : fDescType(type)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {

    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            // Only combined-image-sampler bindings may reference an immutable
            // sampler; for texel buffers the pImmutableSamplers field is left
            // untouched (ignored by Vulkan for that descriptor type).
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                if (immutableSamplers[i]) {
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting to nullptr fixes an error in the param checker validation layer. Even though
        // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
        // null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        GR_VK_CALL_ERRCHECK(gpu, CreateDescriptorSetLayout(gpu->device(),
                                                           &dsSamplerLayoutCreateInfo,
                                                           nullptr,
                                                           &fDescLayout));
        fDescCountPerSet = visibilities.count();
    } else {
        SkASSERT(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type);
        GR_STATIC_ASSERT(1 == kUniformDescPerSet);
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        memset(&dsUniBinding, 0, sizeof(dsUniBinding));
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        memset(&uniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        GR_VK_CALL_ERRCHECK(gpu, CreateDescriptorSetLayout(gpu->device(),
                                                           &uniformLayoutCreateInfo,
                                                           nullptr,
                                                           &fDescLayout));
        fDescCountPerSet = kUniformDescPerSet;
    }

    // Start every manager with the same pool capacity; getNewPool() grows it
    // as pools are exhausted.
    SkASSERT(fDescCountPerSet < kStartNumDescriptors);
    fMaxDescriptors = kStartNumDescriptors;
    SkASSERT(fMaxDescriptors > 0);
    this->getNewPool(gpu);
}
276
egdaniela95220d2016-07-21 11:50:37 -0700277void GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
278 if (fPool) {
279 fPool->unref(gpu);
280 uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
281 if (newPoolSize < kMaxDescriptors) {
282 fMaxDescriptors = newPoolSize;
283 } else {
284 fMaxDescriptors = kMaxDescriptors;
285 }
286
287 }
288 fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
289 fMaxDescriptors);
290 SkASSERT(fPool);
291}
292
293void GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
294 VkDescriptorSet* ds) {
295 if (!fMaxDescriptors) {
296 return;
297 }
298 fCurrentDescriptorCount += fDescCountPerSet;
299 if (fCurrentDescriptorCount > fMaxDescriptors) {
300 this->getNewPool(gpu);
301 fCurrentDescriptorCount = fDescCountPerSet;
302 }
303
304 VkDescriptorSetAllocateInfo dsAllocateInfo;
305 memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
306 dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
307 dsAllocateInfo.pNext = nullptr;
308 dsAllocateInfo.descriptorPool = fPool->descPool();
309 dsAllocateInfo.descriptorSetCount = 1;
310 dsAllocateInfo.pSetLayouts = &fDescLayout;
Greg Daniele643da62019-11-05 12:36:42 -0500311 GR_VK_CALL_ERRCHECK(gpu, AllocateDescriptorSets(gpu->device(), &dsAllocateInfo, ds));
egdaniela95220d2016-07-21 11:50:37 -0700312}
313
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500314void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
egdaniel707bbd62016-07-26 07:19:47 -0700315 if (fDescLayout) {
316 GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
317 nullptr));
318 fDescLayout = VK_NULL_HANDLE;
319 }
egdaniela95220d2016-07-21 11:50:37 -0700320
321 if (fPool) {
322 fPool->unref(gpu);
323 fPool = nullptr;
324 }
325}
326
327void GrVkDescriptorSetManager::DescriptorPoolManager::abandonGPUResources() {
328 fDescLayout = VK_NULL_HANDLE;
329 if (fPool) {
330 fPool->unrefAndAbandon();
331 fPool = nullptr;
332 }
333}