blob: ef52133b8a29b0e5d1421778c8f5b201d96cc54e [file] [log] [blame]
egdaniela95220d2016-07-21 11:50:37 -07001/*
2* Copyright 2016 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkDescriptorSetManager.h"
egdaniela95220d2016-07-21 11:50:37 -07009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/vk/GrVkDescriptorPool.h"
11#include "src/gpu/vk/GrVkDescriptorSet.h"
12#include "src/gpu/vk/GrVkGpu.h"
13#include "src/gpu/vk/GrVkUniformHandler.h"
egdaniela95220d2016-07-21 11:50:37 -070014
Ben Wagner6c30e742019-02-06 10:46:14 -050015#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
16#include <sanitizer/lsan_interface.h>
17#endif
18
Greg Daniel18f96022017-05-04 15:09:03 -040019GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
Ethan Nicholas0be34802019-08-15 12:36:58 -040020 SkSTArray<1, uint32_t> visibilities;
21 uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
Greg Daniel18f96022017-05-04 15:09:03 -040022 if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
Ethan Nicholas0be34802019-08-15 12:36:58 -040023 stages |= kGeometry_GrShaderFlag;
Greg Daniel18f96022017-05-04 15:09:03 -040024 }
Ethan Nicholas0be34802019-08-15 12:36:58 -040025 visibilities.push_back(stages);
Greg Daniel7a82edf2018-12-04 10:54:34 -050026 SkTArray<const GrVkSampler*> samplers;
Greg Daniel9b63dc82019-11-06 09:21:55 -050027 return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
Greg Daniel18f96022017-05-04 15:09:03 -040028}
29
30GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
31 GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
32 SkSTArray<4, uint32_t> visibilities;
Greg Daniel7a82edf2018-12-04 10:54:34 -050033 SkSTArray<4, const GrVkSampler*> immutableSamplers;
Brian Salomon662ea4b2018-07-12 14:53:49 -040034 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
35 for (int i = 0 ; i < uniformHandler.numSamplers(); ++i) {
36 visibilities.push_back(uniformHandler.samplerVisibility(i));
Greg Daniel7a82edf2018-12-04 10:54:34 -050037 immutableSamplers.push_back(uniformHandler.immutableSampler(i));
egdaniel707bbd62016-07-26 07:19:47 -070038 }
Greg Daniel9b63dc82019-11-06 09:21:55 -050039 return Create(gpu, type, visibilities, immutableSamplers);
Greg Daniel18f96022017-05-04 15:09:03 -040040}
41
Greg Daniel9b63dc82019-11-06 09:21:55 -050042VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
43 VkShaderStageFlags flags = 0;
44
45 if (visibility & kVertex_GrShaderFlag) {
46 flags |= VK_SHADER_STAGE_VERTEX_BIT;
47 }
48 if (visibility & kGeometry_GrShaderFlag) {
49 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
50 }
51 if (visibility & kFragment_GrShaderFlag) {
52 flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
53 }
54 return flags;
55}
56
57static bool get_layout_and_desc_count(GrVkGpu* gpu,
58 VkDescriptorType type,
59 const SkTArray<uint32_t>& visibilities,
60 const SkTArray<const GrVkSampler*>& immutableSamplers,
61 VkDescriptorSetLayout* descSetLayout,
62 uint32_t* descCountPerSet) {
63 if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
64 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
65 uint32_t numBindings = visibilities.count();
66 std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
67 new VkDescriptorSetLayoutBinding[numBindings]);
68 for (uint32_t i = 0; i < numBindings; ++i) {
69 uint32_t visibility = visibilities[i];
70 dsSamplerBindings[i].binding = i;
71 dsSamplerBindings[i].descriptorType = type;
72 dsSamplerBindings[i].descriptorCount = 1;
73 dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
74 if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
75 if (immutableSamplers[i]) {
76 dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
77 } else {
78 dsSamplerBindings[i].pImmutableSamplers = nullptr;
79 }
80 }
81 }
82
83 VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
84 memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
85 dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
86 dsSamplerLayoutCreateInfo.pNext = nullptr;
87 dsSamplerLayoutCreateInfo.flags = 0;
88 dsSamplerLayoutCreateInfo.bindingCount = numBindings;
89 // Setting to nullptr fixes an error in the param checker validation layer. Even though
90 // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
91 // null.
92 dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;
93
94#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
95 // skia:8713
96 __lsan::ScopedDisabler lsanDisabler;
97#endif
98 VkResult result;
99 GR_VK_CALL_RESULT(gpu, result,
100 CreateDescriptorSetLayout(gpu->device(),
101 &dsSamplerLayoutCreateInfo,
102 nullptr,
103 descSetLayout));
104 if (result != VK_SUCCESS) {
105 return false;
106 }
107
108 *descCountPerSet = visibilities.count();
109 } else {
110 SkASSERT(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type);
111 static constexpr int kUniformDescPerSet = 1;
112 SkASSERT(kUniformDescPerSet == visibilities.count());
113 // Create Uniform Buffer Descriptor
114 VkDescriptorSetLayoutBinding dsUniBinding;
115 memset(&dsUniBinding, 0, sizeof(dsUniBinding));
116 dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
117 dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
118 dsUniBinding.descriptorCount = 1;
119 dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
120 dsUniBinding.pImmutableSamplers = nullptr;
121
122 VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
123 memset(&uniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
124 uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
125 uniformLayoutCreateInfo.pNext = nullptr;
126 uniformLayoutCreateInfo.flags = 0;
127 uniformLayoutCreateInfo.bindingCount = 1;
128 uniformLayoutCreateInfo.pBindings = &dsUniBinding;
129
130#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
131 // skia:8713
132 __lsan::ScopedDisabler lsanDisabler;
133#endif
134 VkResult result;
135 GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
136 &uniformLayoutCreateInfo,
137 nullptr,
138 descSetLayout));
139 if (result != VK_SUCCESS) {
140 return false;
141 }
142
143 *descCountPerSet = kUniformDescPerSet;
144 }
145 return true;
146}
147
148GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
Greg Daniel7a82edf2018-12-04 10:54:34 -0500149 GrVkGpu* gpu, VkDescriptorType type,
150 const SkTArray<uint32_t>& visibilities,
Greg Daniel9b63dc82019-11-06 09:21:55 -0500151 const SkTArray<const GrVkSampler*>& immutableSamplers) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500152#ifdef SK_DEBUG
153 if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
154 SkASSERT(visibilities.count() == immutableSamplers.count());
155 } else {
156 SkASSERT(immutableSamplers.count() == 0);
157 }
158#endif
Greg Daniel9b63dc82019-11-06 09:21:55 -0500159
160 VkDescriptorSetLayout descSetLayout;
161 uint32_t descCountPerSet;
162 if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
163 &descCountPerSet)) {
164 return nullptr;
165 }
166 return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
167 immutableSamplers);
168}
169
170GrVkDescriptorSetManager::GrVkDescriptorSetManager(
171 GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
172 uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
173 const SkTArray<const GrVkSampler*>& immutableSamplers)
174 : fPoolManager(descSetLayout, type, descCountPerSet) {
Greg Daniel18f96022017-05-04 15:09:03 -0400175 for (int i = 0; i < visibilities.count(); ++i) {
176 fBindingVisibilities.push_back(visibilities[i]);
egdaniel4d866df2016-08-25 13:52:00 -0700177 }
Greg Daniel7a82edf2018-12-04 10:54:34 -0500178 for (int i = 0; i < immutableSamplers.count(); ++i) {
179 const GrVkSampler* sampler = immutableSamplers[i];
180 if (sampler) {
181 sampler->ref();
182 }
183 fImmutableSamplers.push_back(sampler);
184 }
egdaniel4d866df2016-08-25 13:52:00 -0700185}
186
egdaniela95220d2016-07-21 11:50:37 -0700187const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
188 const Handle& handle) {
189 const GrVkDescriptorSet* ds = nullptr;
190 int count = fFreeSets.count();
191 if (count > 0) {
192 ds = fFreeSets[count - 1];
193 fFreeSets.removeShuffle(count - 1);
194 } else {
195 VkDescriptorSet vkDS;
Greg Daniel9b63dc82019-11-06 09:21:55 -0500196 if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
197 return nullptr;
198 }
egdaniela95220d2016-07-21 11:50:37 -0700199
Jim Van Verth5082df12020-03-11 16:14:51 -0400200 ds = new GrVkDescriptorSet(gpu, vkDS, fPoolManager.fPool, handle);
egdaniela95220d2016-07-21 11:50:37 -0700201 }
202 SkASSERT(ds);
203 return ds;
204}
205
206void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
207 SkASSERT(descSet);
208 fFreeSets.push_back(descSet);
209}
210
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500211void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700212 fPoolManager.freeGPUResources(gpu);
213
214 for (int i = 0; i < fFreeSets.count(); ++i) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400215 fFreeSets[i]->unref();
egdaniela95220d2016-07-21 11:50:37 -0700216 }
217 fFreeSets.reset();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500218
219 for (int i = 0; i < fImmutableSamplers.count(); ++i) {
220 if (fImmutableSamplers[i]) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400221 fImmutableSamplers[i]->unref();
Greg Daniel7a82edf2018-12-04 10:54:34 -0500222 }
223 }
224 fImmutableSamplers.reset();
egdaniela95220d2016-07-21 11:50:37 -0700225}
226
egdaniel707bbd62016-07-26 07:19:47 -0700227bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
228 const GrVkUniformHandler* uniHandler) const {
229 SkASSERT(uniHandler);
230 if (type != fPoolManager.fDescType) {
231 return false;
232 }
233
Brian Salomon662ea4b2018-07-12 14:53:49 -0400234 SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
235 if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
236 return false;
237 }
238 for (int i = 0; i < uniHandler->numSamplers(); ++i) {
Greg Daniel7a82edf2018-12-04 10:54:34 -0500239 if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
240 uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
egdaniel707bbd62016-07-26 07:19:47 -0700241 return false;
242 }
egdaniel707bbd62016-07-26 07:19:47 -0700243 }
244 return true;
245}
246
egdaniela95220d2016-07-21 11:50:37 -0700247////////////////////////////////////////////////////////////////////////////////
248
// Constructs a pool manager that will vend descriptor sets of the given layout/type,
// consuming descCountPerSet descriptors per set. The first VkDescriptorPool is
// created lazily (fPool starts null) on the first getNewDescriptorSet() call; the
// pool budget starts at kStartNumDescriptors and grows in getNewPool().
GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
    : fDescLayout(layout)
    , fDescType(type)
    , fDescCountPerSet(descCountPerSet)
    , fMaxDescriptors(kStartNumDescriptors)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
}
260
Greg Daniel9b63dc82019-11-06 09:21:55 -0500261bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
egdaniela95220d2016-07-21 11:50:37 -0700262 if (fPool) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400263 fPool->unref();
egdaniela95220d2016-07-21 11:50:37 -0700264 uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
265 if (newPoolSize < kMaxDescriptors) {
266 fMaxDescriptors = newPoolSize;
267 } else {
268 fMaxDescriptors = kMaxDescriptors;
269 }
270
271 }
272 fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
273 fMaxDescriptors);
Greg Daniel9b63dc82019-11-06 09:21:55 -0500274 return SkToBool(fPool);
egdaniela95220d2016-07-21 11:50:37 -0700275}
276
Greg Daniel9b63dc82019-11-06 09:21:55 -0500277bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
egdaniela95220d2016-07-21 11:50:37 -0700278 VkDescriptorSet* ds) {
279 if (!fMaxDescriptors) {
Greg Daniel9b63dc82019-11-06 09:21:55 -0500280 return false;
egdaniela95220d2016-07-21 11:50:37 -0700281 }
282 fCurrentDescriptorCount += fDescCountPerSet;
Greg Daniel9b63dc82019-11-06 09:21:55 -0500283 if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
284 if (!this->getNewPool(gpu) ) {
285 return false;
286 }
egdaniela95220d2016-07-21 11:50:37 -0700287 fCurrentDescriptorCount = fDescCountPerSet;
288 }
289
290 VkDescriptorSetAllocateInfo dsAllocateInfo;
291 memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
292 dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
293 dsAllocateInfo.pNext = nullptr;
294 dsAllocateInfo.descriptorPool = fPool->descPool();
295 dsAllocateInfo.descriptorSetCount = 1;
296 dsAllocateInfo.pSetLayouts = &fDescLayout;
Greg Daniel9b63dc82019-11-06 09:21:55 -0500297 VkResult result;
298 GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
299 &dsAllocateInfo,
300 ds));
301 return result == VK_SUCCESS;
egdaniela95220d2016-07-21 11:50:37 -0700302}
303
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500304void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
egdaniel707bbd62016-07-26 07:19:47 -0700305 if (fDescLayout) {
306 GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
307 nullptr));
308 fDescLayout = VK_NULL_HANDLE;
309 }
egdaniela95220d2016-07-21 11:50:37 -0700310
311 if (fPool) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400312 fPool->unref();
egdaniela95220d2016-07-21 11:50:37 -0700313 fPool = nullptr;
314 }
315}
316