/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkDescriptorSetManager.h"
egdaniela95220d2016-07-21 11:50:37 -07009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/vk/GrVkDescriptorPool.h"
11#include "src/gpu/vk/GrVkDescriptorSet.h"
12#include "src/gpu/vk/GrVkGpu.h"
13#include "src/gpu/vk/GrVkUniformHandler.h"
egdaniela95220d2016-07-21 11:50:37 -070014
Ben Wagner6c30e742019-02-06 10:46:14 -050015#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
16#include <sanitizer/lsan_interface.h>
17#endif
18
Greg Daniel18f96022017-05-04 15:09:03 -040019GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
Ethan Nicholas0be34802019-08-15 12:36:58 -040020 SkSTArray<1, uint32_t> visibilities;
21 uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
Greg Daniel18f96022017-05-04 15:09:03 -040022 if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
Ethan Nicholas0be34802019-08-15 12:36:58 -040023 stages |= kGeometry_GrShaderFlag;
Greg Daniel18f96022017-05-04 15:09:03 -040024 }
Ethan Nicholas0be34802019-08-15 12:36:58 -040025 visibilities.push_back(stages);
Greg Daniel7a82edf2018-12-04 10:54:34 -050026 SkTArray<const GrVkSampler*> samplers;
Greg Daniel9b63dc82019-11-06 09:21:55 -050027 return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
Greg Daniel18f96022017-05-04 15:09:03 -040028}
29
30GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
31 GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
32 SkSTArray<4, uint32_t> visibilities;
Greg Daniel7a82edf2018-12-04 10:54:34 -050033 SkSTArray<4, const GrVkSampler*> immutableSamplers;
Brian Salomon662ea4b2018-07-12 14:53:49 -040034 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
35 for (int i = 0 ; i < uniformHandler.numSamplers(); ++i) {
36 visibilities.push_back(uniformHandler.samplerVisibility(i));
Greg Daniel7a82edf2018-12-04 10:54:34 -050037 immutableSamplers.push_back(uniformHandler.immutableSampler(i));
egdaniel707bbd62016-07-26 07:19:47 -070038 }
Greg Daniel9b63dc82019-11-06 09:21:55 -050039 return Create(gpu, type, visibilities, immutableSamplers);
Greg Daniel18f96022017-05-04 15:09:03 -040040}
41
Greg Danielf32fec12020-09-08 13:05:32 -040042GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateInputManager(GrVkGpu* gpu) {
43 SkSTArray<1, uint32_t> visibilities;
44 visibilities.push_back(kFragment_GrShaderFlag);
45 SkTArray<const GrVkSampler*> samplers;
46 return Create(gpu, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, visibilities, samplers);
47}
48
Greg Daniel9b63dc82019-11-06 09:21:55 -050049VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
50 VkShaderStageFlags flags = 0;
51
52 if (visibility & kVertex_GrShaderFlag) {
53 flags |= VK_SHADER_STAGE_VERTEX_BIT;
54 }
55 if (visibility & kGeometry_GrShaderFlag) {
56 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
57 }
58 if (visibility & kFragment_GrShaderFlag) {
59 flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
60 }
61 return flags;
62}
63
// Builds the VkDescriptorSetLayout for the given descriptor type and reports
// how many descriptors each set allocated from that layout will consume.
// For sampler/texel-buffer types there is one binding per entry in
// 'visibilities'; uniform-buffer and input-attachment layouts always have
// exactly one binding. Returns false (and does not write *descSetLayout) if
// vkCreateDescriptorSetLayout fails.
static bool get_layout_and_desc_count(GrVkGpu* gpu,
                                      VkDescriptorType type,
                                      const SkTArray<uint32_t>& visibilities,
                                      const SkTArray<const GrVkSampler*>& immutableSamplers,
                                      VkDescriptorSetLayout* descSetLayout,
                                      uint32_t* descCountPerSet) {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        // One binding per sampler, each holding a single descriptor.
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                // Immutable samplers are baked into the layout itself; a null
                // entry means the binding uses a dynamically bound sampler.
                if (immutableSamplers[i]) {
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting to nullptr fixes an error in the param checker validation layer. Even though
        // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
        // null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result,
                          CreateDescriptorSetLayout(gpu->device(),
                                                    &dsSamplerLayoutCreateInfo,
                                                    nullptr,
                                                    descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        // Each set consumes one descriptor per sampler binding.
        *descCountPerSet = visibilities.count();
    } else if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
        static constexpr int kUniformDescPerSet = 1;
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        // Single binding at the fixed uniform binding point.
        VkDescriptorSetLayoutBinding dsUniBinding;
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &uniformLayoutCreateInfo,
                                                                 nullptr,
                                                                 descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kUniformDescPerSet;
    } else {
        SkASSERT(type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
        static constexpr int kInputDescPerSet = 1;
        SkASSERT(kInputDescPerSet == visibilities.count());

        // Create Input Buffer Descriptor
        VkDescriptorSetLayoutBinding dsInpuBinding;
        dsInpuBinding.binding = 0;
        dsInpuBinding.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
        dsInpuBinding.descriptorCount = 1;
        // Input attachments are only ever read from the fragment stage.
        SkASSERT(visibilities[0] == kFragment_GrShaderFlag);
        dsInpuBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsInpuBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo inputLayoutCreateInfo;
        inputLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        inputLayoutCreateInfo.pNext = nullptr;
        inputLayoutCreateInfo.flags = 0;
        inputLayoutCreateInfo.bindingCount = 1;
        inputLayoutCreateInfo.pBindings = &dsInpuBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &inputLayoutCreateInfo,
                                                                 nullptr, descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kInputDescPerSet;
    }
    return true;
}
185
186GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
Greg Daniel7a82edf2018-12-04 10:54:34 -0500187 GrVkGpu* gpu, VkDescriptorType type,
188 const SkTArray<uint32_t>& visibilities,
Greg Daniel9b63dc82019-11-06 09:21:55 -0500189 const SkTArray<const GrVkSampler*>& immutableSamplers) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500190#ifdef SK_DEBUG
191 if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
192 SkASSERT(visibilities.count() == immutableSamplers.count());
193 } else {
194 SkASSERT(immutableSamplers.count() == 0);
195 }
196#endif
Greg Daniel9b63dc82019-11-06 09:21:55 -0500197
198 VkDescriptorSetLayout descSetLayout;
199 uint32_t descCountPerSet;
200 if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
201 &descCountPerSet)) {
202 return nullptr;
203 }
204 return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
205 immutableSamplers);
206}
207
208GrVkDescriptorSetManager::GrVkDescriptorSetManager(
209 GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
210 uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
211 const SkTArray<const GrVkSampler*>& immutableSamplers)
212 : fPoolManager(descSetLayout, type, descCountPerSet) {
Greg Daniel18f96022017-05-04 15:09:03 -0400213 for (int i = 0; i < visibilities.count(); ++i) {
214 fBindingVisibilities.push_back(visibilities[i]);
egdaniel4d866df2016-08-25 13:52:00 -0700215 }
Greg Daniel7a82edf2018-12-04 10:54:34 -0500216 for (int i = 0; i < immutableSamplers.count(); ++i) {
217 const GrVkSampler* sampler = immutableSamplers[i];
218 if (sampler) {
219 sampler->ref();
220 }
221 fImmutableSamplers.push_back(sampler);
222 }
egdaniel4d866df2016-08-25 13:52:00 -0700223}
224
egdaniela95220d2016-07-21 11:50:37 -0700225const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
226 const Handle& handle) {
227 const GrVkDescriptorSet* ds = nullptr;
228 int count = fFreeSets.count();
229 if (count > 0) {
230 ds = fFreeSets[count - 1];
231 fFreeSets.removeShuffle(count - 1);
232 } else {
233 VkDescriptorSet vkDS;
Greg Daniel9b63dc82019-11-06 09:21:55 -0500234 if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
235 return nullptr;
236 }
egdaniela95220d2016-07-21 11:50:37 -0700237
Jim Van Verth5082df12020-03-11 16:14:51 -0400238 ds = new GrVkDescriptorSet(gpu, vkDS, fPoolManager.fPool, handle);
egdaniela95220d2016-07-21 11:50:37 -0700239 }
240 SkASSERT(ds);
241 return ds;
242}
243
244void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
245 SkASSERT(descSet);
246 fFreeSets.push_back(descSet);
247}
248
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500249void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700250 fPoolManager.freeGPUResources(gpu);
251
252 for (int i = 0; i < fFreeSets.count(); ++i) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400253 fFreeSets[i]->unref();
egdaniela95220d2016-07-21 11:50:37 -0700254 }
255 fFreeSets.reset();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500256
257 for (int i = 0; i < fImmutableSamplers.count(); ++i) {
258 if (fImmutableSamplers[i]) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400259 fImmutableSamplers[i]->unref();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500260 }
261 }
262 fImmutableSamplers.reset();
egdaniela95220d2016-07-21 11:50:37 -0700263}
264
egdaniel707bbd62016-07-26 07:19:47 -0700265bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
266 const GrVkUniformHandler* uniHandler) const {
267 SkASSERT(uniHandler);
268 if (type != fPoolManager.fDescType) {
269 return false;
270 }
271
Brian Salomon662ea4b2018-07-12 14:53:49 -0400272 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
273 if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
274 return false;
275 }
276 for (int i = 0; i < uniHandler->numSamplers(); ++i) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500277 if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
278 uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
egdaniel707bbd62016-07-26 07:19:47 -0700279 return false;
280 }
egdaniel707bbd62016-07-26 07:19:47 -0700281 }
282 return true;
283}
284
egdaniela95220d2016-07-21 11:50:37 -0700285////////////////////////////////////////////////////////////////////////////////
286
// Takes ownership of 'layout' (destroyed in freeGPUResources()). The backing
// VkDescriptorPool is created lazily by getNewPool(); the first pool is sized
// for kStartNumDescriptors and grows on exhaustion.
GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
    : fDescLayout(layout)
    , fDescType(type)
    , fDescCountPerSet(descCountPerSet)
    , fMaxDescriptors(kStartNumDescriptors)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
}
298
Greg Daniel9b63dc82019-11-06 09:21:55 -0500299bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700300 if (fPool) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400301 fPool->unref();
egdaniela95220d2016-07-21 11:50:37 -0700302 uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
303 if (newPoolSize < kMaxDescriptors) {
304 fMaxDescriptors = newPoolSize;
305 } else {
306 fMaxDescriptors = kMaxDescriptors;
307 }
308
309 }
310 fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
311 fMaxDescriptors);
Greg Daniel9b63dc82019-11-06 09:21:55 -0500312 return SkToBool(fPool);
egdaniela95220d2016-07-21 11:50:37 -0700313}
314
// Allocates one VkDescriptorSet from the current pool, rolling over to a new
// pool when the running descriptor count would exceed the pool's capacity.
// Returns false if no pool can be obtained or vkAllocateDescriptorSets fails.
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    // fMaxDescriptors == 0 means this manager was configured with no
    // descriptors per set; nothing can ever be allocated.
    if (!fMaxDescriptors) {
        return false;
    }
    // Charge this set's descriptors up front; if that overflows the current
    // pool (or there is no pool yet), switch pools and restart the count.
    fCurrentDescriptorCount += fDescCountPerSet;
    if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
        if (!this->getNewPool(gpu) ) {
            return false;
        }
        fCurrentDescriptorCount = fDescCountPerSet;
    }

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
                                                          &dsAllocateInfo,
                                                          ds));
    return result == VK_SUCCESS;
}
341
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500342void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
egdaniel707bbd62016-07-26 07:19:47 -0700343 if (fDescLayout) {
344 GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
345 nullptr));
346 fDescLayout = VK_NULL_HANDLE;
347 }
egdaniela95220d2016-07-21 11:50:37 -0700348
349 if (fPool) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400350 fPool->unref();
egdaniela95220d2016-07-21 11:50:37 -0700351 fPool = nullptr;
352 }
353}
354