blob: 120f3a2ca4658d919dbb35461bba68c91301f3e0 [file] [log] [blame]
/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrVkDescriptorSetManager.h"
9
10#include "GrVkDescriptorPool.h"
11#include "GrVkDescriptorSet.h"
12#include "GrVkGpu.h"
egdaniel707bbd62016-07-26 07:19:47 -070013#include "GrVkUniformHandler.h"
egdaniela95220d2016-07-21 11:50:37 -070014
Ben Wagner6c30e742019-02-06 10:46:14 -050015#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
16#include <sanitizer/lsan_interface.h>
17#endif
18
Greg Daniel18f96022017-05-04 15:09:03 -040019GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
20 SkSTArray<2, uint32_t> visibilities;
21 // We set the visibility of the first binding to all supported geometry processing shader
22 // stages (vertex, tesselation, geometry, etc.) and the second binding to the fragment
23 // shader.
24 uint32_t geomStages = kVertex_GrShaderFlag;
25 if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
26 geomStages |= kGeometry_GrShaderFlag;
27 }
28 visibilities.push_back(geomStages);
29 visibilities.push_back(kFragment_GrShaderFlag);
Greg Daniel7a82edf2018-12-04 10:54:34 -050030
31 SkTArray<const GrVkSampler*> samplers;
32 return new GrVkDescriptorSetManager(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities,
33 samplers);
Greg Daniel18f96022017-05-04 15:09:03 -040034}
35
36GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
37 GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
38 SkSTArray<4, uint32_t> visibilities;
Greg Daniel7a82edf2018-12-04 10:54:34 -050039 SkSTArray<4, const GrVkSampler*> immutableSamplers;
Brian Salomon662ea4b2018-07-12 14:53:49 -040040 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
41 for (int i = 0 ; i < uniformHandler.numSamplers(); ++i) {
42 visibilities.push_back(uniformHandler.samplerVisibility(i));
Greg Daniel7a82edf2018-12-04 10:54:34 -050043 immutableSamplers.push_back(uniformHandler.immutableSampler(i));
egdaniel707bbd62016-07-26 07:19:47 -070044 }
Greg Daniel7a82edf2018-12-04 10:54:34 -050045 return new GrVkDescriptorSetManager(gpu, type, visibilities, immutableSamplers);
Greg Daniel18f96022017-05-04 15:09:03 -040046}
47
48GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
49 GrVkGpu* gpu, VkDescriptorType type, const SkTArray<uint32_t>& visibilities) {
Greg Daniel7a82edf2018-12-04 10:54:34 -050050 SkSTArray<4, const GrVkSampler*> immutableSamplers;
51 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
52 for (int i = 0 ; i < visibilities.count(); ++i) {
53 immutableSamplers.push_back(nullptr);
54 }
55 return new GrVkDescriptorSetManager(gpu, type, visibilities, immutableSamplers);
egdaniela95220d2016-07-21 11:50:37 -070056}
57
Greg Daniel7a82edf2018-12-04 10:54:34 -050058GrVkDescriptorSetManager::GrVkDescriptorSetManager(
59 GrVkGpu* gpu, VkDescriptorType type,
60 const SkTArray<uint32_t>& visibilities,
61 const SkTArray<const GrVkSampler*>& immutableSamplers)
62 : fPoolManager(type, gpu, visibilities, immutableSamplers) {
63#ifdef SK_DEBUG
64 if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
65 SkASSERT(visibilities.count() == immutableSamplers.count());
66 } else {
67 SkASSERT(immutableSamplers.count() == 0);
68 }
69#endif
Greg Daniel18f96022017-05-04 15:09:03 -040070 for (int i = 0; i < visibilities.count(); ++i) {
71 fBindingVisibilities.push_back(visibilities[i]);
egdaniel4d866df2016-08-25 13:52:00 -070072 }
Greg Daniel7a82edf2018-12-04 10:54:34 -050073 for (int i = 0; i < immutableSamplers.count(); ++i) {
74 const GrVkSampler* sampler = immutableSamplers[i];
75 if (sampler) {
76 sampler->ref();
77 }
78 fImmutableSamplers.push_back(sampler);
79 }
egdaniel4d866df2016-08-25 13:52:00 -070080}
81
egdaniela95220d2016-07-21 11:50:37 -070082const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
83 const Handle& handle) {
84 const GrVkDescriptorSet* ds = nullptr;
85 int count = fFreeSets.count();
86 if (count > 0) {
87 ds = fFreeSets[count - 1];
88 fFreeSets.removeShuffle(count - 1);
89 } else {
90 VkDescriptorSet vkDS;
91 fPoolManager.getNewDescriptorSet(gpu, &vkDS);
92
93 ds = new GrVkDescriptorSet(vkDS, fPoolManager.fPool, handle);
94 }
95 SkASSERT(ds);
96 return ds;
97}
98
99void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
100 SkASSERT(descSet);
101 fFreeSets.push_back(descSet);
102}
103
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500104void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700105 fPoolManager.freeGPUResources(gpu);
106
107 for (int i = 0; i < fFreeSets.count(); ++i) {
108 fFreeSets[i]->unref(gpu);
109 }
110 fFreeSets.reset();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500111
112 for (int i = 0; i < fImmutableSamplers.count(); ++i) {
113 if (fImmutableSamplers[i]) {
114 fImmutableSamplers[i]->unref(gpu);
115 }
116 }
117 fImmutableSamplers.reset();
egdaniela95220d2016-07-21 11:50:37 -0700118}
119
120void GrVkDescriptorSetManager::abandon() {
121 fPoolManager.abandonGPUResources();
122
123 for (int i = 0; i < fFreeSets.count(); ++i) {
124 fFreeSets[i]->unrefAndAbandon();
125 }
126 fFreeSets.reset();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500127
128 for (int i = 0; i < fImmutableSamplers.count(); ++i) {
129 if (fImmutableSamplers[i]) {
130 fImmutableSamplers[i]->unrefAndAbandon();
131 }
132 }
133 fImmutableSamplers.reset();
egdaniela95220d2016-07-21 11:50:37 -0700134}
135
egdaniel707bbd62016-07-26 07:19:47 -0700136bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
137 const GrVkUniformHandler* uniHandler) const {
138 SkASSERT(uniHandler);
139 if (type != fPoolManager.fDescType) {
140 return false;
141 }
142
Brian Salomon662ea4b2018-07-12 14:53:49 -0400143 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
144 if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
145 return false;
146 }
147 for (int i = 0; i < uniHandler->numSamplers(); ++i) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500148 if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
149 uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
egdaniel707bbd62016-07-26 07:19:47 -0700150 return false;
151 }
egdaniel707bbd62016-07-26 07:19:47 -0700152 }
153 return true;
154}
155
egdaniel4d866df2016-08-25 13:52:00 -0700156bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
157 const SkTArray<uint32_t>& visibilities) const {
158 if (type != fPoolManager.fDescType) {
159 return false;
160 }
161
Greg Daniela7543782017-05-02 14:01:43 -0400162 if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
163 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
egdaniel4d866df2016-08-25 13:52:00 -0700164 if (fBindingVisibilities.count() != visibilities.count()) {
165 return false;
166 }
167 for (int i = 0; i < visibilities.count(); ++i) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500168 if (visibilities[i] != fBindingVisibilities[i] || fImmutableSamplers[i] != nullptr) {
egdaniel4d866df2016-08-25 13:52:00 -0700169 return false;
170 }
171 }
172 }
173 return true;
174}
175
egdaniela95220d2016-07-21 11:50:37 -0700176////////////////////////////////////////////////////////////////////////////////
177
egdaniel707bbd62016-07-26 07:19:47 -0700178VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
179 VkShaderStageFlags flags = 0;
180
181 if (visibility & kVertex_GrShaderFlag) {
182 flags |= VK_SHADER_STAGE_VERTEX_BIT;
183 }
184 if (visibility & kGeometry_GrShaderFlag) {
185 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
186 }
187 if (visibility & kFragment_GrShaderFlag) {
188 flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
189 }
190 return flags;
191}
192
193GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
194 VkDescriptorType type,
195 GrVkGpu* gpu,
Greg Daniel7a82edf2018-12-04 10:54:34 -0500196 const SkTArray<uint32_t>& visibilities,
197 const SkTArray<const GrVkSampler*>& immutableSamplers)
egdaniel4d866df2016-08-25 13:52:00 -0700198 : fDescType(type)
199 , fCurrentDescriptorCount(0)
200 , fPool(nullptr) {
egdaniel4d866df2016-08-25 13:52:00 -0700201
Greg Daniela7543782017-05-02 14:01:43 -0400202
Greg Daniela7543782017-05-02 14:01:43 -0400203 if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
Greg Daniel18f96022017-05-04 15:09:03 -0400204 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
205 uint32_t numBindings = visibilities.count();
206 std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
207 new VkDescriptorSetLayoutBinding[numBindings]);
208 for (uint32_t i = 0; i < numBindings; ++i) {
209 uint32_t visibility = visibilities[i];
210 dsSamplerBindings[i].binding = i;
211 dsSamplerBindings[i].descriptorType = type;
212 dsSamplerBindings[i].descriptorCount = 1;
213 dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
Greg Daniel7a82edf2018-12-04 10:54:34 -0500214 if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
215 if (immutableSamplers[i]) {
216 dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
217 } else {
218 dsSamplerBindings[i].pImmutableSamplers = nullptr;
219 }
220 }
egdaniel4d866df2016-08-25 13:52:00 -0700221 }
egdaniel707bbd62016-07-26 07:19:47 -0700222
Greg Daniel18f96022017-05-04 15:09:03 -0400223 VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
224 memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
225 dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
226 dsSamplerLayoutCreateInfo.pNext = nullptr;
227 dsSamplerLayoutCreateInfo.flags = 0;
228 dsSamplerLayoutCreateInfo.bindingCount = numBindings;
229 // Setting to nullptr fixes an error in the param checker validation layer. Even though
230 // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
231 // null.
232 dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;
233
Ben Wagner6c30e742019-02-06 10:46:14 -0500234#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
235 // skia:8713
236 __lsan::ScopedDisabler lsanDisabler;
237#endif
Greg Daniel18f96022017-05-04 15:09:03 -0400238 GR_VK_CALL_ERRCHECK(gpu->vkInterface(),
239 CreateDescriptorSetLayout(gpu->device(),
240 &dsSamplerLayoutCreateInfo,
241 nullptr,
242 &fDescLayout));
243 fDescCountPerSet = visibilities.count();
egdaniel707bbd62016-07-26 07:19:47 -0700244 } else {
Greg Daniela7543782017-05-02 14:01:43 -0400245 SkASSERT(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type);
Greg Daniel18f96022017-05-04 15:09:03 -0400246 GR_STATIC_ASSERT(2 == kUniformDescPerSet);
247 SkASSERT(kUniformDescPerSet == visibilities.count());
egdaniel707bbd62016-07-26 07:19:47 -0700248 // Create Uniform Buffer Descriptor
Greg Daniel18f96022017-05-04 15:09:03 -0400249 static const uint32_t bindings[kUniformDescPerSet] =
250 { GrVkUniformHandler::kGeometryBinding, GrVkUniformHandler::kFragBinding };
egdaniel707bbd62016-07-26 07:19:47 -0700251 VkDescriptorSetLayoutBinding dsUniBindings[kUniformDescPerSet];
Greg Daniel18f96022017-05-04 15:09:03 -0400252 memset(&dsUniBindings, 0, kUniformDescPerSet * sizeof(VkDescriptorSetLayoutBinding));
253 for (int i = 0; i < kUniformDescPerSet; ++i) {
254 dsUniBindings[i].binding = bindings[i];
255 dsUniBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
256 dsUniBindings[i].descriptorCount = 1;
257 dsUniBindings[i].stageFlags = visibility_to_vk_stage_flags(visibilities[i]);
258 dsUniBindings[i].pImmutableSamplers = nullptr;
259 }
egdaniel707bbd62016-07-26 07:19:47 -0700260
261 VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
262 memset(&uniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
263 uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
264 uniformLayoutCreateInfo.pNext = nullptr;
265 uniformLayoutCreateInfo.flags = 0;
266 uniformLayoutCreateInfo.bindingCount = 2;
267 uniformLayoutCreateInfo.pBindings = dsUniBindings;
268
Ben Wagner6c30e742019-02-06 10:46:14 -0500269#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
270 // skia:8713
271 __lsan::ScopedDisabler lsanDisabler;
272#endif
egdaniel707bbd62016-07-26 07:19:47 -0700273 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateDescriptorSetLayout(gpu->device(),
274 &uniformLayoutCreateInfo,
275 nullptr,
276 &fDescLayout));
277 fDescCountPerSet = kUniformDescPerSet;
278 }
279
280 SkASSERT(fDescCountPerSet < kStartNumDescriptors);
281 fMaxDescriptors = kStartNumDescriptors;
282 SkASSERT(fMaxDescriptors > 0);
283 this->getNewPool(gpu);
284}
285
egdaniela95220d2016-07-21 11:50:37 -0700286void GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
287 if (fPool) {
288 fPool->unref(gpu);
289 uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
290 if (newPoolSize < kMaxDescriptors) {
291 fMaxDescriptors = newPoolSize;
292 } else {
293 fMaxDescriptors = kMaxDescriptors;
294 }
295
296 }
297 fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
298 fMaxDescriptors);
299 SkASSERT(fPool);
300}
301
302void GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
303 VkDescriptorSet* ds) {
304 if (!fMaxDescriptors) {
305 return;
306 }
307 fCurrentDescriptorCount += fDescCountPerSet;
308 if (fCurrentDescriptorCount > fMaxDescriptors) {
309 this->getNewPool(gpu);
310 fCurrentDescriptorCount = fDescCountPerSet;
311 }
312
313 VkDescriptorSetAllocateInfo dsAllocateInfo;
314 memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
315 dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
316 dsAllocateInfo.pNext = nullptr;
317 dsAllocateInfo.descriptorPool = fPool->descPool();
318 dsAllocateInfo.descriptorSetCount = 1;
319 dsAllocateInfo.pSetLayouts = &fDescLayout;
320 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), AllocateDescriptorSets(gpu->device(),
321 &dsAllocateInfo,
322 ds));
323}
324
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500325void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
egdaniel707bbd62016-07-26 07:19:47 -0700326 if (fDescLayout) {
327 GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
328 nullptr));
329 fDescLayout = VK_NULL_HANDLE;
330 }
egdaniela95220d2016-07-21 11:50:37 -0700331
332 if (fPool) {
333 fPool->unref(gpu);
334 fPool = nullptr;
335 }
336}
337
338void GrVkDescriptorSetManager::DescriptorPoolManager::abandonGPUResources() {
339 fDescLayout = VK_NULL_HANDLE;
340 if (fPool) {
341 fPool->unrefAndAbandon();
342 fPool = nullptr;
343 }
344}