/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkDescriptorSetManager.h"

#include "src/gpu/vk/GrVkDescriptorPool.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUniformHandler.h"

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
#include <sanitizer/lsan_interface.h>
#endif

GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
    if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
        stages |= kGeometry_GrShaderFlag;
    }
    visibilities.push_back(stages);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
        GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
    SkSTArray<4, uint32_t> visibilities;
    SkSTArray<4, const GrVkSampler*> immutableSamplers;
    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    for (int i = 0; i < uniformHandler.numSamplers(); ++i) {
        visibilities.push_back(uniformHandler.samplerVisibility(i));
        immutableSamplers.push_back(uniformHandler.immutableSampler(i));
    }
    return Create(gpu, type, visibilities, immutableSamplers);
}

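// A "zero sampler" manager manages combined-image-sampler descriptor sets whose layout has no
// bindings at all.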
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateZeroSamplerManager(GrVkGpu* gpu) {
    SkTArray<uint32_t> visibilities;
    SkTArray<const GrVkSampler*> immutableSamplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, visibilities, immutableSamplers);
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateInputManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    visibilities.push_back(kFragment_GrShaderFlag);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, visibilities, samplers);
}

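// Converts GrShaderFlag visibility bits into the matching VkShaderStageFlags for a descriptor
// set layout binding.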
VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
    VkShaderStageFlags flags = 0;

    if (visibility & kVertex_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (visibility & kGeometry_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    if (visibility & kFragment_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    return flags;
}

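// Creates the VkDescriptorSetLayout for the requested descriptor type and reports how many
// descriptors of that type each allocated set will use.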
static bool get_layout_and_desc_count(GrVkGpu* gpu,
                                      VkDescriptorType type,
                                      const SkTArray<uint32_t>& visibilities,
                                      const SkTArray<const GrVkSampler*>& immutableSamplers,
                                      VkDescriptorSetLayout* descSetLayout,
                                      uint32_t* descCountPerSet) {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                if (immutableSamplers[i]) {
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting to nullptr fixes an error in the param checker validation layer. Even though
        // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
        // null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result,
                          CreateDescriptorSetLayout(gpu->device(),
                                                    &dsSamplerLayoutCreateInfo,
                                                    nullptr,
                                                    descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = visibilities.count();
    } else if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
        static constexpr int kUniformDescPerSet = 1;
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &uniformLayoutCreateInfo,
                                                                 nullptr,
                                                                 descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kUniformDescPerSet;
    } else {
        SkASSERT(type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
        static constexpr int kInputDescPerSet = 1;
        SkASSERT(kInputDescPerSet == visibilities.count());

        // Create Input Buffer Descriptor
        VkDescriptorSetLayoutBinding dsInputBinding;
        dsInputBinding.binding = 0;
        dsInputBinding.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
        dsInputBinding.descriptorCount = 1;
        SkASSERT(visibilities[0] == kFragment_GrShaderFlag);
        dsInputBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsInputBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo inputLayoutCreateInfo;
        inputLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        inputLayoutCreateInfo.pNext = nullptr;
        inputLayoutCreateInfo.flags = 0;
        inputLayoutCreateInfo.bindingCount = 1;
        inputLayoutCreateInfo.pBindings = &dsInputBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &inputLayoutCreateInfo,
                                                                 nullptr, descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kInputDescPerSet;
    }
    return true;
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
        GrVkGpu* gpu, VkDescriptorType type,
        const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers) {
#ifdef SK_DEBUG
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        SkASSERT(visibilities.count() == immutableSamplers.count());
    } else {
        SkASSERT(immutableSamplers.count() == 0);
    }
#endif

    VkDescriptorSetLayout descSetLayout;
    uint32_t descCountPerSet;
    if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
                                   &descCountPerSet)) {
        return nullptr;
    }
    return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
                                        immutableSamplers);
}

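// The constructor copies the per-binding visibilities and takes a ref on each immutable sampler
// so they remain valid for as long as this manager holds them.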
GrVkDescriptorSetManager::GrVkDescriptorSetManager(
        GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
        uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers)
        : fPoolManager(descSetLayout, type, descCountPerSet) {
    for (int i = 0; i < visibilities.count(); ++i) {
        fBindingVisibilities.push_back(visibilities[i]);
    }
    for (int i = 0; i < immutableSamplers.count(); ++i) {
        const GrVkSampler* sampler = immutableSamplers[i];
        if (sampler) {
            sampler->ref();
        }
        fImmutableSamplers.push_back(sampler);
    }
}

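// Returns a descriptor set, reusing a recycled set when one is available and otherwise
// allocating a new one from the pool manager.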
const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
                                                                    const Handle& handle) {
    const GrVkDescriptorSet* ds = nullptr;
    int count = fFreeSets.count();
    if (count > 0) {
        ds = fFreeSets[count - 1];
        fFreeSets.removeShuffle(count - 1);
    } else {
        VkDescriptorSet vkDS;
        if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
            return nullptr;
        }

        ds = new GrVkDescriptorSet(gpu, vkDS, fPoolManager.fPool, handle);
    }
    SkASSERT(ds);
    return ds;
}

void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}

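// Frees the pool manager's Vulkan objects and drops this manager's references to any recycled
// sets and immutable samplers.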
void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
    fPoolManager.freeGPUResources(gpu);

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unref();
    }
    fFreeSets.reset();

    for (int i = 0; i < fImmutableSamplers.count(); ++i) {
        if (fImmutableSamplers[i]) {
            fImmutableSamplers[i]->unref();
        }
    }
    fImmutableSamplers.reset();
}

bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const GrVkUniformHandler* uniHandler) const {
    SkASSERT(uniHandler);
    if (type != fPoolManager.fDescType) {
        return false;
    }

    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
        return false;
    }
    for (int i = 0; i < uniHandler->numSamplers(); ++i) {
        if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
            uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
            return false;
        }
    }
    return true;
}

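// True for managers that manage combined-image-sampler sets with no bindings (see
// CreateZeroSamplerManager).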
bool GrVkDescriptorSetManager::isZeroSampler() const {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER != fPoolManager.fDescType) {
        return false;
    }
    if (fBindingVisibilities.count()) {
        return false;
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////

GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
        : fDescLayout(layout)
        , fDescType(type)
        , fDescCountPerSet(descCountPerSet)
        , fMaxDescriptors(kStartNumDescriptors)
        , fCurrentDescriptorCount(0)
        , fPool(nullptr) {
}

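// Releases this manager's reference to the current pool (if any) and creates a new, larger one.
// Each new pool grows by roughly 1.5x, capped at kMaxDescriptors.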
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
    if (fPool) {
        fPool->unref();
        uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
        if (newPoolSize < kMaxDescriptors) {
            fMaxDescriptors = newPoolSize;
        } else {
            fMaxDescriptors = kMaxDescriptors;
        }
    }
    fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
                                                                         fMaxDescriptors);
    return SkToBool(fPool);
}

bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    if (!fMaxDescriptors) {
        return false;
    }
    fCurrentDescriptorCount += fDescCountPerSet;
    if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
        if (!this->getNewPool(gpu)) {
            return false;
        }
        fCurrentDescriptorCount = fDescCountPerSet;
    }

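    // Allocate a single descriptor set from the current pool using the cached set layout.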
    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
                                                          &dsAllocateInfo,
                                                          ds));
    return result == VK_SUCCESS;
}

void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
    if (fDescLayout) {
        GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
                                                                  nullptr));
        fDescLayout = VK_NULL_HANDLE;
    }

    if (fPool) {
        fPool->unref();
        fPool = nullptr;
    }
}