blob: d1882b40d77fb307a6ea271fe479f3b56ce813a9 [file] [log] [blame]
/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkDescriptorSetManager.h"

#include "src/gpu/vk/GrVkDescriptorPool.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUniformHandler.h"

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
#include <sanitizer/lsan_interface.h>
#endif

Greg Daniel18f96022017-05-04 15:09:03 -040019GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
Ethan Nicholas0be34802019-08-15 12:36:58 -040020 SkSTArray<1, uint32_t> visibilities;
21 uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
Ethan Nicholas0be34802019-08-15 12:36:58 -040022 visibilities.push_back(stages);
Greg Daniel7a82edf2018-12-04 10:54:34 -050023 SkTArray<const GrVkSampler*> samplers;
Greg Daniel9b63dc82019-11-06 09:21:55 -050024 return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
Greg Daniel18f96022017-05-04 15:09:03 -040025}
26
27GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
28 GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
29 SkSTArray<4, uint32_t> visibilities;
Greg Daniel7a82edf2018-12-04 10:54:34 -050030 SkSTArray<4, const GrVkSampler*> immutableSamplers;
Brian Salomon662ea4b2018-07-12 14:53:49 -040031 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
32 for (int i = 0 ; i < uniformHandler.numSamplers(); ++i) {
33 visibilities.push_back(uniformHandler.samplerVisibility(i));
Greg Daniel7a82edf2018-12-04 10:54:34 -050034 immutableSamplers.push_back(uniformHandler.immutableSampler(i));
egdaniel707bbd62016-07-26 07:19:47 -070035 }
Greg Daniel9b63dc82019-11-06 09:21:55 -050036 return Create(gpu, type, visibilities, immutableSamplers);
Greg Daniel18f96022017-05-04 15:09:03 -040037}
38
Greg Daniela8c32102020-12-30 15:09:32 -050039GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateZeroSamplerManager(GrVkGpu* gpu) {
40 SkTArray<uint32_t> visibilities;
41 SkTArray<const GrVkSampler*> immutableSamplers;
42 return Create(gpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, visibilities, immutableSamplers);
43}
44
Greg Danielf32fec12020-09-08 13:05:32 -040045GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateInputManager(GrVkGpu* gpu) {
46 SkSTArray<1, uint32_t> visibilities;
47 visibilities.push_back(kFragment_GrShaderFlag);
48 SkTArray<const GrVkSampler*> samplers;
49 return Create(gpu, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, visibilities, samplers);
50}
51
Greg Daniel9b63dc82019-11-06 09:21:55 -050052VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
53 VkShaderStageFlags flags = 0;
54
55 if (visibility & kVertex_GrShaderFlag) {
56 flags |= VK_SHADER_STAGE_VERTEX_BIT;
57 }
Greg Daniel9b63dc82019-11-06 09:21:55 -050058 if (visibility & kFragment_GrShaderFlag) {
59 flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
60 }
61 return flags;
62}
63
// Creates the VkDescriptorSetLayout for the requested descriptor |type| and
// reports, via |descCountPerSet|, how many descriptors one set allocated
// against that layout consumes (the pool manager uses this to size pools).
// Returns false if vkCreateDescriptorSetLayout fails.
static bool get_layout_and_desc_count(GrVkGpu* gpu,
                                      VkDescriptorType type,
                                      const SkTArray<uint32_t>& visibilities,
                                      const SkTArray<const GrVkSampler*>& immutableSamplers,
                                      VkDescriptorSetLayout* descSetLayout,
                                      uint32_t* descCountPerSet) {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        // One binding per visibility entry; binding index == array index.
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        *descCountPerSet = 0;
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                if (immutableSamplers[i]) {
                    // A YCbCr immutable sampler can consume more than one
                    // descriptor slot; the caps report how many.
                    (*descCountPerSet) += gpu->vkCaps().ycbcrCombinedImageSamplerDescriptorCount();
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    (*descCountPerSet)++;
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
            // NOTE(review): for VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER this
            // loop never increments *descCountPerSet (it stays 0) and leaves
            // pImmutableSamplers uninitialized. Vulkan ignores
            // pImmutableSamplers for non-sampler types, but confirm the zero
            // per-set count is intended for texel buffers.
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting to nullptr fixes an error in the param checker validation layer. Even though
        // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
        // null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result,
                          CreateDescriptorSetLayout(gpu->device(),
                                                    &dsSamplerLayoutCreateInfo,
                                                    nullptr,
                                                    descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }
    } else if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
        // Uniform sets always have exactly one buffer binding.
        static constexpr int kUniformDescPerSet = 1;
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &uniformLayoutCreateInfo,
                                                                 nullptr,
                                                                 descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kUniformDescPerSet;
    } else {
        // Input-attachment sets: a single fragment-only binding at index 0.
        SkASSERT(type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
        static constexpr int kInputDescPerSet = 1;
        SkASSERT(kInputDescPerSet == visibilities.count());

        // Create Input Buffer Descriptor
        VkDescriptorSetLayoutBinding dsInpuBinding;
        dsInpuBinding.binding = 0;
        dsInpuBinding.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
        dsInpuBinding.descriptorCount = 1;
        SkASSERT(visibilities[0] == kFragment_GrShaderFlag);
        dsInpuBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsInpuBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo inputLayoutCreateInfo;
        inputLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        inputLayoutCreateInfo.pNext = nullptr;
        inputLayoutCreateInfo.flags = 0;
        inputLayoutCreateInfo.bindingCount = 1;
        inputLayoutCreateInfo.pBindings = &dsInpuBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &inputLayoutCreateInfo,
                                                                 nullptr, descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kInputDescPerSet;
    }
    return true;
}
186
187GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
Greg Daniel7a82edf2018-12-04 10:54:34 -0500188 GrVkGpu* gpu, VkDescriptorType type,
189 const SkTArray<uint32_t>& visibilities,
Greg Daniel9b63dc82019-11-06 09:21:55 -0500190 const SkTArray<const GrVkSampler*>& immutableSamplers) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500191#ifdef SK_DEBUG
192 if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
193 SkASSERT(visibilities.count() == immutableSamplers.count());
194 } else {
195 SkASSERT(immutableSamplers.count() == 0);
196 }
197#endif
Greg Daniel9b63dc82019-11-06 09:21:55 -0500198
199 VkDescriptorSetLayout descSetLayout;
200 uint32_t descCountPerSet;
201 if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
202 &descCountPerSet)) {
203 return nullptr;
204 }
205 return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
206 immutableSamplers);
207}
208
209GrVkDescriptorSetManager::GrVkDescriptorSetManager(
210 GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
211 uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
212 const SkTArray<const GrVkSampler*>& immutableSamplers)
213 : fPoolManager(descSetLayout, type, descCountPerSet) {
Greg Daniel18f96022017-05-04 15:09:03 -0400214 for (int i = 0; i < visibilities.count(); ++i) {
215 fBindingVisibilities.push_back(visibilities[i]);
egdaniel4d866df2016-08-25 13:52:00 -0700216 }
Greg Daniel7a82edf2018-12-04 10:54:34 -0500217 for (int i = 0; i < immutableSamplers.count(); ++i) {
218 const GrVkSampler* sampler = immutableSamplers[i];
219 if (sampler) {
220 sampler->ref();
221 }
222 fImmutableSamplers.push_back(sampler);
223 }
egdaniel4d866df2016-08-25 13:52:00 -0700224}
225
egdaniela95220d2016-07-21 11:50:37 -0700226const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
227 const Handle& handle) {
228 const GrVkDescriptorSet* ds = nullptr;
229 int count = fFreeSets.count();
230 if (count > 0) {
231 ds = fFreeSets[count - 1];
232 fFreeSets.removeShuffle(count - 1);
233 } else {
234 VkDescriptorSet vkDS;
Greg Daniel9b63dc82019-11-06 09:21:55 -0500235 if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
236 return nullptr;
237 }
egdaniela95220d2016-07-21 11:50:37 -0700238
Jim Van Verth5082df12020-03-11 16:14:51 -0400239 ds = new GrVkDescriptorSet(gpu, vkDS, fPoolManager.fPool, handle);
egdaniela95220d2016-07-21 11:50:37 -0700240 }
241 SkASSERT(ds);
242 return ds;
243}
244
// Returns |descSet| to the free list so a later getDescriptorSet() call can
// reuse it instead of allocating from the pool.
void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}
249
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500250void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700251 fPoolManager.freeGPUResources(gpu);
252
253 for (int i = 0; i < fFreeSets.count(); ++i) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400254 fFreeSets[i]->unref();
egdaniela95220d2016-07-21 11:50:37 -0700255 }
256 fFreeSets.reset();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500257
258 for (int i = 0; i < fImmutableSamplers.count(); ++i) {
259 if (fImmutableSamplers[i]) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400260 fImmutableSamplers[i]->unref();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500261 }
262 }
263 fImmutableSamplers.reset();
egdaniela95220d2016-07-21 11:50:37 -0700264}
265
egdaniel707bbd62016-07-26 07:19:47 -0700266bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
267 const GrVkUniformHandler* uniHandler) const {
268 SkASSERT(uniHandler);
269 if (type != fPoolManager.fDescType) {
270 return false;
271 }
272
Brian Salomon662ea4b2018-07-12 14:53:49 -0400273 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
274 if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
275 return false;
276 }
277 for (int i = 0; i < uniHandler->numSamplers(); ++i) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500278 if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
279 uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
egdaniel707bbd62016-07-26 07:19:47 -0700280 return false;
281 }
egdaniel707bbd62016-07-26 07:19:47 -0700282 }
283 return true;
284}
285
Greg Daniela8c32102020-12-30 15:09:32 -0500286bool GrVkDescriptorSetManager::isZeroSampler() const {
287 if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER != fPoolManager.fDescType) {
288 return false;
289 }
290 if (fBindingVisibilities.count()) {
291 return false;
292 }
293 return true;
294}
295
egdaniela95220d2016-07-21 11:50:37 -0700296////////////////////////////////////////////////////////////////////////////////
297
// Takes ownership of |layout| (destroyed in freeGPUResources). Pools start at
// kStartNumDescriptors capacity and grow lazily in getNewPool(); no pool is
// created until the first descriptor set is requested.
GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
    : fDescLayout(layout)
    , fDescType(type)
    , fDescCountPerSet(descCountPerSet)
    , fMaxDescriptors(kStartNumDescriptors)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
}
309
Greg Daniel9b63dc82019-11-06 09:21:55 -0500310bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700311 if (fPool) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400312 fPool->unref();
egdaniela95220d2016-07-21 11:50:37 -0700313 uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
314 if (newPoolSize < kMaxDescriptors) {
315 fMaxDescriptors = newPoolSize;
316 } else {
317 fMaxDescriptors = kMaxDescriptors;
318 }
319
320 }
321 fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
322 fMaxDescriptors);
Greg Daniel9b63dc82019-11-06 09:21:55 -0500323 return SkToBool(fPool);
egdaniela95220d2016-07-21 11:50:37 -0700324}
325
// Allocates one descriptor set into |*ds|, rolling over to a new (larger)
// pool when the current pool cannot hold another fDescCountPerSet
// descriptors. Returns false on allocation failure.
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    // fMaxDescriptors == 0 means sets against this layout consume no
    // descriptors; nothing can be allocated from a pool.
    if (!fMaxDescriptors) {
        return false;
    }
    // Reserve this set's descriptors up front; if that overflows the current
    // pool (or no pool exists yet), start a new pool and re-reserve there.
    fCurrentDescriptorCount += fDescCountPerSet;
    if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
        if (!this->getNewPool(gpu) ) {
            return false;
        }
        fCurrentDescriptorCount = fDescCountPerSet;
    }

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
                                                          &dsAllocateInfo,
                                                          ds));
    return result == VK_SUCCESS;
}
352
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500353void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
egdaniel707bbd62016-07-26 07:19:47 -0700354 if (fDescLayout) {
355 GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
356 nullptr));
357 fDescLayout = VK_NULL_HANDLE;
358 }
egdaniela95220d2016-07-21 11:50:37 -0700359
360 if (fPool) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400361 fPool->unref();
egdaniela95220d2016-07-21 11:50:37 -0700362 fPool = nullptr;
363 }
364}
365