/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkDescriptorSetManager.h"

#include "src/gpu/vk/GrVkDescriptorPool.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUniformHandler.h"

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
#include <sanitizer/lsan_interface.h>
#endif

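// Creates the manager for uniform-buffer descriptor sets: a single binding that is
// visible to the vertex and fragment stages (and geometry when the device supports it).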
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
    if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
        stages |= kGeometry_GrShaderFlag;
    }
    visibilities.push_back(stages);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
}

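// Creates a sampler-descriptor manager whose per-binding visibilities and immutable
// samplers mirror the samplers declared on the given uniform handler.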
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
        GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
    SkSTArray<4, uint32_t> visibilities;
    SkSTArray<4, const GrVkSampler*> immutableSamplers;
    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    for (int i = 0; i < uniformHandler.numSamplers(); ++i) {
        visibilities.push_back(uniformHandler.samplerVisibility(i));
        immutableSamplers.push_back(uniformHandler.immutableSampler(i));
    }
    return Create(gpu, type, visibilities, immutableSamplers);
}

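// Overload built from raw visibility flags; these bindings never use immutable samplers.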
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
        GrVkGpu* gpu, VkDescriptorType type, const SkTArray<uint32_t>& visibilities) {
    SkSTArray<4, const GrVkSampler*> immutableSamplers;
    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    for (int i = 0; i < visibilities.count(); ++i) {
        immutableSamplers.push_back(nullptr);
    }
    return Create(gpu, type, visibilities, immutableSamplers);
}

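// Translates Gr shader-stage flags into the corresponding VkShaderStageFlags bits.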
static VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
    VkShaderStageFlags flags = 0;

    if (visibility & kVertex_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (visibility & kGeometry_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    if (visibility & kFragment_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    return flags;
}

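// Builds the VkDescriptorSetLayout for the requested descriptor type and reports how many
// descriptors a single set consumes. Returns false if Vulkan fails to create the layout.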
static bool get_layout_and_desc_count(GrVkGpu* gpu,
                                      VkDescriptorType type,
                                      const SkTArray<uint32_t>& visibilities,
                                      const SkTArray<const GrVkSampler*>& immutableSamplers,
                                      VkDescriptorSetLayout* descSetLayout,
                                      uint32_t* descCountPerSet) {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                if (immutableSamplers[i]) {
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting pBindings to nullptr fixes an error in the param checker validation layer.
        // Even when bindingCount is 0 (which is valid), the layer still tries to validate
        // pBindings unless it is null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result,
                          CreateDescriptorSetLayout(gpu->device(),
                                                    &dsSamplerLayoutCreateInfo,
                                                    nullptr,
                                                    descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = visibilities.count();
    } else {
        SkASSERT(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type);
        static constexpr int kUniformDescPerSet = 1;
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        memset(&dsUniBinding, 0, sizeof(dsUniBinding));
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        memset(&uniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &uniformLayoutCreateInfo,
                                                                 nullptr,
                                                                 descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kUniformDescPerSet;
    }
    return true;
}

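// Shared factory: checks the visibility/immutable-sampler pairing, builds the set layout,
// and wraps the result in a new manager.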
GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
        GrVkGpu* gpu, VkDescriptorType type,
        const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers) {
#ifdef SK_DEBUG
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        SkASSERT(visibilities.count() == immutableSamplers.count());
    } else {
        SkASSERT(immutableSamplers.count() == 0);
    }
#endif

    VkDescriptorSetLayout descSetLayout;
    uint32_t descCountPerSet;
    if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
                                   &descCountPerSet)) {
        return nullptr;
    }
    return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
                                        immutableSamplers);
}

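// The manager takes a ref on each non-null immutable sampler it records.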
GrVkDescriptorSetManager::GrVkDescriptorSetManager(
        GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
        uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers)
        : fPoolManager(descSetLayout, type, descCountPerSet) {
    for (int i = 0; i < visibilities.count(); ++i) {
        fBindingVisibilities.push_back(visibilities[i]);
    }
    for (int i = 0; i < immutableSamplers.count(); ++i) {
        const GrVkSampler* sampler = immutableSamplers[i];
        if (sampler) {
            sampler->ref();
        }
        fImmutableSamplers.push_back(sampler);
    }
}

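// Hands out a descriptor set, preferring a recycled set from the free list and only
// allocating through the pool manager when the free list is empty. A typical caller
// (sketch) pairs this with recycleDescriptorSet():
//     const GrVkDescriptorSet* ds = manager->getDescriptorSet(gpu, handle);
//     // ... record work that uses ds ...
//     manager->recycleDescriptorSet(ds);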
const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
                                                                    const Handle& handle) {
    const GrVkDescriptorSet* ds = nullptr;
    int count = fFreeSets.count();
    if (count > 0) {
        ds = fFreeSets[count - 1];
        fFreeSets.removeShuffle(count - 1);
    } else {
        VkDescriptorSet vkDS;
        if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
            return nullptr;
        }

        ds = new GrVkDescriptorSet(vkDS, fPoolManager.fPool, handle);
    }
    SkASSERT(ds);
    return ds;
}

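// Returns a descriptor set to the free list so a later getDescriptorSet() can reuse it.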
void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}

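// Releases GPU-backed state: the pool manager's layout and pool, every cached free set,
// and the refs held on immutable samplers.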
void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
    fPoolManager.freeGPUResources(gpu);

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unref(gpu);
    }
    fFreeSets.reset();

    for (int i = 0; i < fImmutableSamplers.count(); ++i) {
        if (fImmutableSamplers[i]) {
            fImmutableSamplers[i]->unref(gpu);
        }
    }
    fImmutableSamplers.reset();
}

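// Like release(), but for an abandoned context: drops all refs without making Vulkan calls.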
void GrVkDescriptorSetManager::abandon() {
    fPoolManager.abandonGPUResources();

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unrefAndAbandon();
    }
    fFreeSets.reset();

    for (int i = 0; i < fImmutableSamplers.count(); ++i) {
        if (fImmutableSamplers[i]) {
            fImmutableSamplers[i]->unrefAndAbandon();
        }
    }
    fImmutableSamplers.reset();
}

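// Compatibility checks: a cached manager may be reused only if the descriptor type,
// per-binding visibilities, and immutable samplers all match the request.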
bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const GrVkUniformHandler* uniHandler) const {
    SkASSERT(uniHandler);
    if (type != fPoolManager.fDescType) {
        return false;
    }

    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
        return false;
    }
    for (int i = 0; i < uniHandler->numSamplers(); ++i) {
        if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
            uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
            return false;
        }
    }
    return true;
}

bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const SkTArray<uint32_t>& visibilities) const {
    if (type != fPoolManager.fDescType) {
        return false;
    }

    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        if (fBindingVisibilities.count() != visibilities.count()) {
            return false;
        }
        for (int i = 0; i < visibilities.count(); ++i) {
            if (visibilities[i] != fBindingVisibilities[i] || fImmutableSamplers[i] != nullptr) {
                return false;
            }
        }
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////

GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
        : fDescLayout(layout)
        , fDescType(type)
        , fDescCountPerSet(descCountPerSet)
        , fMaxDescriptors(kStartNumDescriptors)
        , fCurrentDescriptorCount(0)
        , fPool(nullptr) {
}

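// Replaces the current pool with a larger one, growing the descriptor budget by roughly
// 1.5x each time until it caps out at kMaxDescriptors.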
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
    if (fPool) {
        fPool->unref(gpu);
        uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
        if (newPoolSize < kMaxDescriptors) {
            fMaxDescriptors = newPoolSize;
        } else {
            fMaxDescriptors = kMaxDescriptors;
        }
    }
    fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
                                                                         fMaxDescriptors);
    return SkToBool(fPool);
}

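// Allocates one set from the current pool, rolling over to a fresh pool whenever the
// running descriptor count would exceed the pool's capacity.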
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    if (!fMaxDescriptors) {
        return false;
    }
    fCurrentDescriptorCount += fDescCountPerSet;
    if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
        if (!this->getNewPool(gpu)) {
            return false;
        }
        fCurrentDescriptorCount = fDescCountPerSet;
    }

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
                                                          &dsAllocateInfo,
                                                          ds));
    return result == VK_SUCCESS;
}

void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
    if (fDescLayout) {
        GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
                                                                  nullptr));
        fDescLayout = VK_NULL_HANDLE;
    }

    if (fPool) {
        fPool->unref(gpu);
        fPool = nullptr;
    }
}

void GrVkDescriptorSetManager::DescriptorPoolManager::abandonGPUResources() {
    fDescLayout = VK_NULL_HANDLE;
    if (fPool) {
        fPool->unrefAndAbandon();
        fPool = nullptr;
    }
}