/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrVkResourceProvider.h"
9
Ethan Nicholas8e265a72018-12-12 16:22:40 -050010#include "GrContextPriv.h"
Brian Salomon2bbdcc42017-09-07 12:36:34 -040011#include "GrSamplerState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050012#include "GrVkCommandBuffer.h"
Ethan Nicholas8e265a72018-12-12 16:22:40 -050013#include "GrVkCommandPool.h"
egdanielbc9b2962016-09-27 08:00:53 -070014#include "GrVkCopyPipeline.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000015#include "GrVkGpu.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050016#include "GrVkPipeline.h"
egdaniel066df7c2016-06-08 14:02:27 -070017#include "GrVkRenderTarget.h"
jvanverth4c6e47a2016-07-22 10:34:52 -070018#include "GrVkUniformBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050019#include "GrVkUtil.h"
Ethan Nicholas8e265a72018-12-12 16:22:40 -050020#include "SkTaskGroup.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050021
#ifdef SK_TRACE_VK_RESOURCES
// Monotonically increasing key source used to tag GrVkResources for tracing.
std::atomic<uint32_t> GrVkResource::fKeyCounter{0};
#endif
25
egdaniel778555c2016-05-02 06:50:36 -070026GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
27 : fGpu(gpu)
egdaniel707bbd62016-07-26 07:19:47 -070028 , fPipelineCache(VK_NULL_HANDLE) {
egdaniel22281c12016-03-23 13:49:40 -070029 fPipelineStateCache = new PipelineStateCache(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050030}
31
32GrVkResourceProvider::~GrVkResourceProvider() {
egdanield62e28b2016-06-07 08:43:30 -070033 SkASSERT(0 == fRenderPassArray.count());
Greg Danielb46add82019-01-02 14:51:29 -050034 SkASSERT(0 == fExternalRenderPasses.count());
jvanverth03509ea2016-03-02 13:19:47 -080035 SkASSERT(VK_NULL_HANDLE == fPipelineCache);
egdaniel22281c12016-03-23 13:49:40 -070036 delete fPipelineStateCache;
jvanverth03509ea2016-03-02 13:19:47 -080037}
38
39void GrVkResourceProvider::init() {
40 VkPipelineCacheCreateInfo createInfo;
41 memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
42 createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
43 createInfo.pNext = nullptr;
44 createInfo.flags = 0;
45 createInfo.initialDataSize = 0;
46 createInfo.pInitialData = nullptr;
47 VkResult result = GR_VK_CALL(fGpu->vkInterface(),
48 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
49 &fPipelineCache));
50 SkASSERT(VK_SUCCESS == result);
51 if (VK_SUCCESS != result) {
52 fPipelineCache = VK_NULL_HANDLE;
53 }
egdaniel778555c2016-05-02 06:50:36 -070054
egdaniel707bbd62016-07-26 07:19:47 -070055 // Init uniform descriptor objects
Greg Daniel18f96022017-05-04 15:09:03 -040056 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
57 fDescriptorSetManagers.emplace_back(dsm);
egdaniel707bbd62016-07-26 07:19:47 -070058 SkASSERT(1 == fDescriptorSetManagers.count());
59 fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
Greg Daniel164a9f02016-02-22 09:56:40 -050060}
61
Brian Salomonff168d92018-06-23 15:17:27 -040062GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPrimitiveProcessor& primProc,
63 const GrPipeline& pipeline,
csmartdaltonc633abb2016-11-01 08:55:55 -070064 const GrStencilSettings& stencil,
Greg Daniel164a9f02016-02-22 09:56:40 -050065 VkPipelineShaderStageCreateInfo* shaderStageInfo,
66 int shaderStageCount,
67 GrPrimitiveType primitiveType,
Greg Daniel99b88e02018-10-03 15:31:20 -040068 VkRenderPass compatibleRenderPass,
Greg Daniel164a9f02016-02-22 09:56:40 -050069 VkPipelineLayout layout) {
Brian Salomonff168d92018-06-23 15:17:27 -040070 return GrVkPipeline::Create(fGpu, primProc, pipeline, stencil, shaderStageInfo,
Greg Daniel99b88e02018-10-03 15:31:20 -040071 shaderStageCount, primitiveType, compatibleRenderPass, layout,
csmartdaltonc633abb2016-11-01 08:55:55 -070072 fPipelineCache);
Greg Daniel164a9f02016-02-22 09:56:40 -050073}
74
egdanielbc9b2962016-09-27 08:00:53 -070075GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
76 const GrVkRenderTarget* dst,
77 VkPipelineShaderStageCreateInfo* shaderStageInfo,
78 VkPipelineLayout pipelineLayout) {
79 // Find or Create a compatible pipeline
80 GrVkCopyPipeline* pipeline = nullptr;
81 for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
82 if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
83 pipeline = fCopyPipelines[i];
84 }
85 }
86 if (!pipeline) {
87 pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
88 pipelineLayout,
89 dst->numColorSamples(),
90 *dst->simpleRenderPass(),
91 fPipelineCache);
Greg Danielf3a4ef92018-03-01 11:34:59 -050092 if (!pipeline) {
93 return nullptr;
94 }
egdanielbc9b2962016-09-27 08:00:53 -070095 fCopyPipelines.push_back(pipeline);
96 }
97 SkASSERT(pipeline);
98 pipeline->ref();
99 return pipeline;
100}
Greg Daniel164a9f02016-02-22 09:56:40 -0500101
102// To create framebuffers, we first need to create a simple RenderPass that is
halcanary9d524f22016-03-29 09:03:52 -0700103// only used for framebuffer creation. When we actually render we will create
Greg Daniel164a9f02016-02-22 09:56:40 -0500104// RenderPasses as needed that are compatible with the framebuffer.
halcanary9d524f22016-03-29 09:03:52 -0700105const GrVkRenderPass*
egdanield62e28b2016-06-07 08:43:30 -0700106GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
107 CompatibleRPHandle* compatibleHandle) {
108 for (int i = 0; i < fRenderPassArray.count(); ++i) {
109 if (fRenderPassArray[i].isCompatible(target)) {
110 const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500111 renderPass->ref();
egdanield62e28b2016-06-07 08:43:30 -0700112 if (compatibleHandle) {
113 *compatibleHandle = CompatibleRPHandle(i);
114 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500115 return renderPass;
116 }
117 }
118
egdanield62e28b2016-06-07 08:43:30 -0700119 const GrVkRenderPass* renderPass =
120 fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
121 renderPass->ref();
122
123 if (compatibleHandle) {
124 *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
125 }
126 return renderPass;
127}
128
129const GrVkRenderPass*
130GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
131 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
132 int index = compatibleHandle.toIndex();
133 const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500134 renderPass->ref();
135 return renderPass;
136}
137
Greg Danielb46add82019-01-02 14:51:29 -0500138const GrVkRenderPass* GrVkResourceProvider::findCompatibleExternalRenderPass(
139 VkRenderPass renderPass, uint32_t colorAttachmentIndex) {
140 for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
141 if (fExternalRenderPasses[i]->isCompatibleExternalRP(renderPass)) {
142 fExternalRenderPasses[i]->ref();
143#ifdef SK_DEBUG
144 uint32_t cachedColorIndex;
145 SkASSERT(fExternalRenderPasses[i]->colorAttachmentIndex(&cachedColorIndex));
146 SkASSERT(cachedColorIndex == colorAttachmentIndex);
147#endif
148 return fExternalRenderPasses[i];
149 }
150 }
151
152 const GrVkRenderPass* newRenderPass = new GrVkRenderPass(renderPass, colorAttachmentIndex);
153 fExternalRenderPasses.push_back(newRenderPass);
154 newRenderPass->ref();
155 return newRenderPass;
156}
157
egdaniel2feb0932016-06-08 06:48:09 -0700158const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
159 const GrVkRenderTarget& target,
160 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700161 const GrVkRenderPass::LoadStoreOps& stencilOps,
162 CompatibleRPHandle* compatibleHandle) {
egdaniel066df7c2016-06-08 14:02:27 -0700163 GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
164 GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
165 : &tempRPHandle;
166 *pRPHandle = target.compatibleRenderPassHandle();
167
egdaniel2feb0932016-06-08 06:48:09 -0700168 // This will get us the handle to (and possible create) the compatible set for the specific
169 // GrVkRenderPass we are looking for.
170 this->findCompatibleRenderPass(target, compatibleHandle);
Greg Danield3682112016-10-03 15:06:07 -0400171 return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
egdaniel2feb0932016-06-08 06:48:09 -0700172}
173
174const GrVkRenderPass*
175GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
176 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700177 const GrVkRenderPass::LoadStoreOps& stencilOps) {
178 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
179 CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
180 const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
181 colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700182 stencilOps);
183 renderPass->ref();
184 return renderPass;
185}
186
Greg Daniel164a9f02016-02-22 09:56:40 -0500187GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
egdanielc2dc1b22016-03-18 13:18:23 -0700188 VkDescriptorType type, uint32_t count) {
189 return new GrVkDescriptorPool(fGpu, type, count);
Greg Daniel164a9f02016-02-22 09:56:40 -0500190}
191
Greg Daniel7e000222018-12-03 10:08:21 -0500192GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
193 const GrSamplerState& params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
194 GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
egdaniel8b6394c2016-03-04 07:35:10 -0800195 if (!sampler) {
Greg Daniel7e000222018-12-03 10:08:21 -0500196 sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
197 if (!sampler) {
198 return nullptr;
199 }
egdaniel8b6394c2016-03-04 07:35:10 -0800200 fSamplers.add(sampler);
201 }
202 SkASSERT(sampler);
203 sampler->ref();
204 return sampler;
205}
206
Greg Daniel7e000222018-12-03 10:08:21 -0500207GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
208 const GrVkYcbcrConversionInfo& ycbcrInfo) {
209 GrVkSamplerYcbcrConversion* ycbcrConversion =
210 fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
211 if (!ycbcrConversion) {
212 ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
213 if (!ycbcrConversion) {
214 return nullptr;
215 }
216 fYcbcrConversions.add(ycbcrConversion);
217 }
218 SkASSERT(ycbcrConversion);
219 ycbcrConversion->ref();
220 return ycbcrConversion;
221}
222
Greg Daniel09eeefb2017-10-16 15:15:02 -0400223GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
Greg Daniel9a51a862018-11-30 10:18:14 -0500224 const GrPipeline& pipeline, const GrPrimitiveProcessor& proc,
225 const GrTextureProxy* const primProcProxies[], GrPrimitiveType primitiveType,
Greg Daniel99b88e02018-10-03 15:31:20 -0400226 VkRenderPass compatibleRenderPass) {
Greg Daniel9a51a862018-11-30 10:18:14 -0500227 return fPipelineStateCache->refPipelineState(proc, primProcProxies, pipeline, primitiveType,
Greg Daniel99b88e02018-10-03 15:31:20 -0400228 compatibleRenderPass);
egdaniel22281c12016-03-23 13:49:40 -0700229}
230
Greg Daniela7543782017-05-02 14:01:43 -0400231void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
232 const GrVkUniformHandler& uniformHandler,
egdaniel707bbd62016-07-26 07:19:47 -0700233 GrVkDescriptorSetManager::Handle* handle) {
egdaniela95220d2016-07-21 11:50:37 -0700234 SkASSERT(handle);
Greg Daniela7543782017-05-02 14:01:43 -0400235 SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
236 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
egdaniela95220d2016-07-21 11:50:37 -0700237 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400238 if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
egdaniela95220d2016-07-21 11:50:37 -0700239 *handle = GrVkDescriptorSetManager::Handle(i);
240 return;
241 }
242 }
243
Greg Daniel18f96022017-05-04 15:09:03 -0400244 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
245 uniformHandler);
246 fDescriptorSetManagers.emplace_back(dsm);
egdaniela95220d2016-07-21 11:50:37 -0700247 *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
248}
249
Greg Daniela7543782017-05-02 14:01:43 -0400250void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
251 const SkTArray<uint32_t>& visibilities,
egdaniel4d866df2016-08-25 13:52:00 -0700252 GrVkDescriptorSetManager::Handle* handle) {
253 SkASSERT(handle);
Greg Daniela7543782017-05-02 14:01:43 -0400254 SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
255 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
egdaniel4d866df2016-08-25 13:52:00 -0700256 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400257 if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
egdaniel4d866df2016-08-25 13:52:00 -0700258 *handle = GrVkDescriptorSetManager::Handle(i);
259 return;
260 }
261 }
262
Greg Daniel18f96022017-05-04 15:09:03 -0400263 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
264 visibilities);
265 fDescriptorSetManagers.emplace_back(dsm);
egdaniel4d866df2016-08-25 13:52:00 -0700266 *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
267}
268
egdaniel707bbd62016-07-26 07:19:47 -0700269VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
270 SkASSERT(fUniformDSHandle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400271 return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
egdaniel707bbd62016-07-26 07:19:47 -0700272}
273
274VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
275 const GrVkDescriptorSetManager::Handle& handle) const {
276 SkASSERT(handle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400277 return fDescriptorSetManagers[handle.toIndex()]->layout();
egdaniel707bbd62016-07-26 07:19:47 -0700278}
279
egdaniela95220d2016-07-21 11:50:37 -0700280const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
281 SkASSERT(fUniformDSHandle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400282 return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
283 fUniformDSHandle);
egdaniela95220d2016-07-21 11:50:37 -0700284}
285
egdaniel707bbd62016-07-26 07:19:47 -0700286const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
287 const GrVkDescriptorSetManager::Handle& handle) {
288 SkASSERT(handle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400289 return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
egdaniel707bbd62016-07-26 07:19:47 -0700290}
egdaniela95220d2016-07-21 11:50:37 -0700291
292void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
293 const GrVkDescriptorSetManager::Handle& handle) {
294 SkASSERT(descSet);
295 SkASSERT(handle.isValid());
296 int managerIdx = handle.toIndex();
297 SkASSERT(managerIdx < fDescriptorSetManagers.count());
Greg Daniel18f96022017-05-04 15:09:03 -0400298 fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
egdaniel778555c2016-05-02 06:50:36 -0700299}
300
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500301GrVkCommandPool* GrVkResourceProvider::findOrCreateCommandPool() {
302 std::unique_lock<std::recursive_mutex> lock(fBackgroundMutex);
303 GrVkCommandPool* result;
304 if (fAvailableCommandPools.count()) {
305 result = fAvailableCommandPools.back();
306 fAvailableCommandPools.pop_back();
jvanverth7ec92412016-07-06 09:24:57 -0700307 } else {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500308 result = GrVkCommandPool::Create(fGpu);
jvanverth7ec92412016-07-06 09:24:57 -0700309 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500310 SkASSERT(result->unique());
311 SkDEBUGCODE(
312 for (const GrVkCommandPool* pool : fActiveCommandPools) {
313 SkASSERT(pool != result);
314 }
315 for (const GrVkCommandPool* pool : fAvailableCommandPools) {
316 SkASSERT(pool != result);
317 }
318 );
319 fActiveCommandPools.push_back(result);
320 result->ref();
321 return result;
Greg Daniel164a9f02016-02-22 09:56:40 -0500322}
323
324void GrVkResourceProvider::checkCommandBuffers() {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500325 for (int i = fActiveCommandPools.count() - 1; i >= 0; --i) {
326 GrVkCommandPool* pool = fActiveCommandPools[i];
327 if (!pool->isOpen()) {
328 GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
329 if (buffer->finished(fGpu)) {
330 fActiveCommandPools.removeShuffle(i);
331 this->backgroundReset(pool);
332 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500333 }
334 }
335}
336
jvanverth4c6e47a2016-07-22 10:34:52 -0700337const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
338 const GrVkResource* resource = nullptr;
339 int count = fAvailableUniformBufferResources.count();
340 if (count > 0) {
341 resource = fAvailableUniformBufferResources[count - 1];
342 fAvailableUniformBufferResources.removeShuffle(count - 1);
343 } else {
344 resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
345 }
346 return resource;
347}
348
349void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
350 fAvailableUniformBufferResources.push_back(resource);
351}
352
Jim Van Verth09557d72016-11-07 11:10:21 -0500353void GrVkResourceProvider::destroyResources(bool deviceLost) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500354 SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
355 if (taskGroup) {
356 taskGroup->wait();
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000357 }
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000358
egdanielbc9b2962016-09-27 08:00:53 -0700359 // Release all copy pipelines
360 for (int i = 0; i < fCopyPipelines.count(); ++i) {
361 fCopyPipelines[i]->unref(fGpu);
362 }
363
egdanield62e28b2016-06-07 08:43:30 -0700364 // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
365 for (int i = 0; i < fRenderPassArray.count(); ++i) {
366 fRenderPassArray[i].releaseResources(fGpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500367 }
egdanield62e28b2016-06-07 08:43:30 -0700368 fRenderPassArray.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500369
Greg Danielb46add82019-01-02 14:51:29 -0500370 for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
371 fExternalRenderPasses[i]->unref(fGpu);
372 }
373 fExternalRenderPasses.reset();
374
egdaniel8b6394c2016-03-04 07:35:10 -0800375 // Iterate through all store GrVkSamplers and unref them before resetting the hash.
Greg Daniel7e000222018-12-03 10:08:21 -0500376 SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
egdaniel8b6394c2016-03-04 07:35:10 -0800377 for (; !iter.done(); ++iter) {
378 (*iter).unref(fGpu);
379 }
380 fSamplers.reset();
381
egdaniel22281c12016-03-23 13:49:40 -0700382 fPipelineStateCache->release();
383
jvanverth03509ea2016-03-02 13:19:47 -0800384 GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
385 fPipelineCache = VK_NULL_HANDLE;
egdaniel778555c2016-05-02 06:50:36 -0700386
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500387 for (GrVkCommandPool* pool : fActiveCommandPools) {
388 SkASSERT(pool->unique());
389 pool->unref(fGpu);
390 }
391 fActiveCommandPools.reset();
392
393 for (GrVkCommandPool* pool : fAvailableCommandPools) {
394 SkASSERT(pool->unique());
395 pool->unref(fGpu);
396 }
397 fAvailableCommandPools.reset();
398
egdaniela95220d2016-07-21 11:50:37 -0700399 // We must release/destroy all command buffers and pipeline states before releasing the
400 // GrVkDescriptorSetManagers
401 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400402 fDescriptorSetManagers[i]->release(fGpu);
egdaniela95220d2016-07-21 11:50:37 -0700403 }
404 fDescriptorSetManagers.reset();
jvanverth4c6e47a2016-07-22 10:34:52 -0700405
406 // release our uniform buffers
407 for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
408 SkASSERT(fAvailableUniformBufferResources[i]->unique());
409 fAvailableUniformBufferResources[i]->unref(fGpu);
410 }
411 fAvailableUniformBufferResources.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500412}
413
414void GrVkResourceProvider::abandonResources() {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500415 SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
416 if (taskGroup) {
417 taskGroup->wait();
Greg Daniel164a9f02016-02-22 09:56:40 -0500418 }
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000419
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500420 // Abandon all command pools
421 for (int i = 0; i < fActiveCommandPools.count(); ++i) {
422 SkASSERT(fActiveCommandPools[i]->unique());
423 fActiveCommandPools[i]->unrefAndAbandon();
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000424 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500425 fActiveCommandPools.reset();
426 for (int i = 0; i < fAvailableCommandPools.count(); ++i) {
427 SkASSERT(fAvailableCommandPools[i]->unique());
428 fAvailableCommandPools[i]->unrefAndAbandon();
429 }
430 fAvailableCommandPools.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500431
egdanielbc9b2962016-09-27 08:00:53 -0700432 // Abandon all copy pipelines
433 for (int i = 0; i < fCopyPipelines.count(); ++i) {
434 fCopyPipelines[i]->unrefAndAbandon();
435 }
436
egdanield62e28b2016-06-07 08:43:30 -0700437 // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
438 for (int i = 0; i < fRenderPassArray.count(); ++i) {
439 fRenderPassArray[i].abandonResources();
Greg Daniel164a9f02016-02-22 09:56:40 -0500440 }
egdanield62e28b2016-06-07 08:43:30 -0700441 fRenderPassArray.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500442
Greg Danielb46add82019-01-02 14:51:29 -0500443 for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
444 fExternalRenderPasses[i]->unrefAndAbandon();
445 }
446 fExternalRenderPasses.reset();
447
egdaniel8b6394c2016-03-04 07:35:10 -0800448 // Iterate through all store GrVkSamplers and unrefAndAbandon them before resetting the hash.
Greg Daniel7e000222018-12-03 10:08:21 -0500449 SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
egdaniel8b6394c2016-03-04 07:35:10 -0800450 for (; !iter.done(); ++iter) {
451 (*iter).unrefAndAbandon();
452 }
453 fSamplers.reset();
454
egdaniel22281c12016-03-23 13:49:40 -0700455 fPipelineStateCache->abandon();
456
jvanverth03509ea2016-03-02 13:19:47 -0800457 fPipelineCache = VK_NULL_HANDLE;
egdaniel778555c2016-05-02 06:50:36 -0700458
egdaniela95220d2016-07-21 11:50:37 -0700459 // We must abandon all command buffers and pipeline states before abandoning the
460 // GrVkDescriptorSetManagers
461 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400462 fDescriptorSetManagers[i]->abandon();
egdaniela95220d2016-07-21 11:50:37 -0700463 }
464 fDescriptorSetManagers.reset();
465
jvanverth4c6e47a2016-07-22 10:34:52 -0700466 // release our uniform buffers
467 for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
468 SkASSERT(fAvailableUniformBufferResources[i]->unique());
469 fAvailableUniformBufferResources[i]->unrefAndAbandon();
470 }
471 fAvailableUniformBufferResources.reset();
jvanverth03509ea2016-03-02 13:19:47 -0800472}
egdanield62e28b2016-06-07 08:43:30 -0700473
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500474void GrVkResourceProvider::backgroundReset(GrVkCommandPool* pool) {
475 SkASSERT(pool->unique());
476 pool->releaseResources(fGpu);
477 SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
478 if (taskGroup) {
479 taskGroup->add([this, pool]() {
480 this->reset(pool);
481 });
482 } else {
483 this->reset(pool);
484 }
485}
486
487void GrVkResourceProvider::reset(GrVkCommandPool* pool) {
488 SkASSERT(pool->unique());
489 pool->reset(fGpu);
490 std::unique_lock<std::recursive_mutex> providerLock(fBackgroundMutex);
491 fAvailableCommandPools.push_back(pool);
492}
493
egdanield62e28b2016-06-07 08:43:30 -0700494////////////////////////////////////////////////////////////////////////////////
495
496GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
497 const GrVkGpu* gpu,
498 const GrVkRenderTarget& target)
499 : fLastReturnedIndex(0) {
500 fRenderPasses.emplace_back(new GrVkRenderPass());
501 fRenderPasses[0]->initSimple(gpu, target);
502}
503
504bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
505 const GrVkRenderTarget& target) const {
506 // The first GrVkRenderpass should always exists since we create the basic load store
507 // render pass on create
508 SkASSERT(fRenderPasses[0]);
509 return fRenderPasses[0]->isCompatible(target);
510}
511
egdaniel2feb0932016-06-08 06:48:09 -0700512GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
513 const GrVkGpu* gpu,
514 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700515 const GrVkRenderPass::LoadStoreOps& stencilOps) {
516 for (int i = 0; i < fRenderPasses.count(); ++i) {
517 int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
egdanielce3bfb12016-08-26 11:05:13 -0700518 if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
egdaniel2feb0932016-06-08 06:48:09 -0700519 fLastReturnedIndex = idx;
520 return fRenderPasses[idx];
521 }
522 }
egdaniel9cb63402016-06-23 08:37:05 -0700523 GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
egdanielce3bfb12016-08-26 11:05:13 -0700524 renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
egdaniel2feb0932016-06-08 06:48:09 -0700525 fLastReturnedIndex = fRenderPasses.count() - 1;
526 return renderPass;
527}
528
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500529void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(GrVkGpu* gpu) {
egdanield62e28b2016-06-07 08:43:30 -0700530 for (int i = 0; i < fRenderPasses.count(); ++i) {
531 if (fRenderPasses[i]) {
532 fRenderPasses[i]->unref(gpu);
533 fRenderPasses[i] = nullptr;
534 }
535 }
536}
537
538void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
539 for (int i = 0; i < fRenderPasses.count(); ++i) {
540 if (fRenderPasses[i]) {
541 fRenderPasses[i]->unrefAndAbandon();
542 fRenderPasses[i] = nullptr;
543 }
544 }
545}