/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrContextPriv.h"
#include "GrSamplerState.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCommandPool.h"
#include "GrVkCopyPipeline.h"
#include "GrVkGpu.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"
#include "SkTaskGroup.h"

#ifdef SK_TRACE_VK_RESOURCES
std::atomic<uint32_t> GrVkResource::fKeyCounter{0};
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

void GrVkResourceProvider::init() {
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }
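    // If the cache could not be created we fall back to VK_NULL_HANDLE, which is a legal
    // pipelineCache argument to vkCreateGraphicsPipelines, so pipelines can still be built;
    // we just lose reuse of previously compiled pipeline state.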

    // Init uniform descriptor objects
    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}
60
Brian Salomonff168d92018-06-23 15:17:27 -040061GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPrimitiveProcessor& primProc,
62 const GrPipeline& pipeline,
csmartdaltonc633abb2016-11-01 08:55:55 -070063 const GrStencilSettings& stencil,
Greg Daniel164a9f02016-02-22 09:56:40 -050064 VkPipelineShaderStageCreateInfo* shaderStageInfo,
65 int shaderStageCount,
66 GrPrimitiveType primitiveType,
Greg Daniel99b88e02018-10-03 15:31:20 -040067 VkRenderPass compatibleRenderPass,
Greg Daniel164a9f02016-02-22 09:56:40 -050068 VkPipelineLayout layout) {
Brian Salomonff168d92018-06-23 15:17:27 -040069 return GrVkPipeline::Create(fGpu, primProc, pipeline, stencil, shaderStageInfo,
Greg Daniel99b88e02018-10-03 15:31:20 -040070 shaderStageCount, primitiveType, compatibleRenderPass, layout,
csmartdaltonc633abb2016-11-01 08:55:55 -070071 fPipelineCache);
Greg Daniel164a9f02016-02-22 09:56:40 -050072}
73
egdanielbc9b2962016-09-27 08:00:53 -070074GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
75 const GrVkRenderTarget* dst,
76 VkPipelineShaderStageCreateInfo* shaderStageInfo,
77 VkPipelineLayout pipelineLayout) {
78 // Find or Create a compatible pipeline
79 GrVkCopyPipeline* pipeline = nullptr;
80 for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
81 if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
82 pipeline = fCopyPipelines[i];
83 }
84 }
85 if (!pipeline) {
86 pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
87 pipelineLayout,
88 dst->numColorSamples(),
89 *dst->simpleRenderPass(),
90 fPipelineCache);
Greg Danielf3a4ef92018-03-01 11:34:59 -050091 if (!pipeline) {
92 return nullptr;
93 }
egdanielbc9b2962016-09-27 08:00:53 -070094 fCopyPipelines.push_back(pipeline);
95 }
96 SkASSERT(pipeline);
97 pipeline->ref();
98 return pipeline;
99}
Greg Daniel164a9f02016-02-22 09:56:40 -0500100
101// To create framebuffers, we first need to create a simple RenderPass that is
halcanary9d524f22016-03-29 09:03:52 -0700102// only used for framebuffer creation. When we actually render we will create
Greg Daniel164a9f02016-02-22 09:56:40 -0500103// RenderPasses as needed that are compatible with the framebuffer.
halcanary9d524f22016-03-29 09:03:52 -0700104const GrVkRenderPass*
egdanield62e28b2016-06-07 08:43:30 -0700105GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
106 CompatibleRPHandle* compatibleHandle) {
107 for (int i = 0; i < fRenderPassArray.count(); ++i) {
108 if (fRenderPassArray[i].isCompatible(target)) {
109 const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500110 renderPass->ref();
egdanield62e28b2016-06-07 08:43:30 -0700111 if (compatibleHandle) {
112 *compatibleHandle = CompatibleRPHandle(i);
113 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500114 return renderPass;
115 }
116 }
117
egdanield62e28b2016-06-07 08:43:30 -0700118 const GrVkRenderPass* renderPass =
119 fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
120 renderPass->ref();
121
122 if (compatibleHandle) {
123 *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
124 }
125 return renderPass;
126}
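
// Illustrative usage sketch (no such caller exists in this file; 'gpu' and 'vkRT' are assumed
// to be a valid GrVkGpu* and GrVkRenderTarget&):
//
//     GrVkResourceProvider::CompatibleRPHandle handle;
//     const GrVkRenderPass* rp =
//             gpu->resourceProvider().findCompatibleRenderPass(vkRT, &handle);
//     // ... create the VkFramebuffer against rp, which fixes the attachment setup ...
//     rp->unref(gpu);  // the provider refs the pass for the caller, so give that ref back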

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
                                                    const GrVkRenderTarget& target,
                                                    const GrVkRenderPass::LoadStoreOps& colorOps,
                                                    const GrVkRenderPass::LoadStoreOps& stencilOps,
                                                    CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the specific
    // GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}
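
// Render passes are cached at two levels: each CompatibleRenderPassSet in fRenderPassArray covers
// one "compatibility class" of attachments (what a framebuffer cares about), and the passes inside
// a set differ only in their color/stencil load-store ops (see
// CompatibleRenderPassSet::getRenderPass at the bottom of this file).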

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
        VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
        const GrSamplerState& params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
        if (!sampler) {
            return nullptr;
        }
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}
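
// Samplers (and the Y'CbCr conversions below) are cached by key; the object handed back above
// carries a ref taken for the caller, while the hash keeps its own ref until
// destroyResources()/abandonResources() tears the cache down.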

GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
        const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSamplerYcbcrConversion* ycbcrConversion =
            fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
    if (!ycbcrConversion) {
        ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
        fYcbcrConversions.add(ycbcrConversion);
    }
    SkASSERT(ycbcrConversion);
    ycbcrConversion->ref();
    return ycbcrConversion;
}

GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        const GrPipeline& pipeline, const GrPrimitiveProcessor& proc,
        const GrTextureProxy* const primProcProxies[], GrPrimitiveType primitiveType,
        VkRenderPass compatibleRenderPass) {
    return fPipelineStateCache->refPipelineState(proc, primProcProxies, pipeline, primitiveType,
                                                 compatibleRenderPass);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   uniformHandler);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   visibilities);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
                                                                                fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
}
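
// Descriptor sets are pooled per GrVkDescriptorSetManager. A handle obtained from
// getSamplerDescriptorSetHandle() (or fUniformDSHandle) identifies the manager; callers fetch sets
// with getUniformDescriptorSet()/getSamplerDescriptorSet() and hand them back via
// recycleDescriptorSet() with the same handle, so the underlying descriptor pools can be reused
// rather than recreated.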

GrVkCommandPool* GrVkResourceProvider::findOrCreateCommandPool() {
    std::unique_lock<std::recursive_mutex> lock(fBackgroundMutex);
    GrVkCommandPool* result;
    if (fAvailableCommandPools.count()) {
        result = fAvailableCommandPools.back();
        fAvailableCommandPools.pop_back();
    } else {
        result = GrVkCommandPool::Create(fGpu);
    }
    SkASSERT(result->unique());
    SkDEBUGCODE(
        for (const GrVkCommandPool* pool : fActiveCommandPools) {
            SkASSERT(pool != result);
        }
        for (const GrVkCommandPool* pool : fAvailableCommandPools) {
            SkASSERT(pool != result);
        }
    );
    fActiveCommandPools.push_back(result);
    result->ref();
    return result;
}
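
// Command pool lifecycle, as implemented below: findOrCreateCommandPool() hands out a ref'd pool
// and tracks it in fActiveCommandPools. Once the pool has been closed and its primary command
// buffer has finished on the GPU, checkCommandBuffers() pulls it out of the active list and passes
// it to backgroundReset(), which releases its resources, resets it (on a SkTaskGroup thread when
// one is available), and returns it to fAvailableCommandPools for reuse.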

void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandPools.count() - 1; i >= 0; --i) {
        GrVkCommandPool* pool = fActiveCommandPools[i];
        if (!pool->isOpen()) {
            GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
            if (buffer->finished(fGpu)) {
                fActiveCommandPools.removeShuffle(i);
                this->backgroundReset(pool);
            }
        }
    }
}

const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}
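
// The two functions above implement a simple free list: GrVkUniformBuffer is expected to obtain
// the GrVkResource backing a standard-size uniform buffer from
// findOrCreateStandardUniformBufferResource() and to return it through
// recycleStandardUniformBufferResource() instead of destroying it, so the allocation can be handed
// to the next uniform buffer.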

void GrVkResourceProvider::destroyResources(bool deviceLost) {
    SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // Release all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unref(fGpu);
    }

    // Loop over all render pass sets to make sure we destroy all the internal VkRenderPasses.
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    for (GrVkCommandPool* pool : fActiveCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fActiveCommandPools.reset();

    for (GrVkCommandPool* pool : fAvailableCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fAvailableCommandPools.reset();

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // Release our uniform buffers.
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

void GrVkResourceProvider::abandonResources() {
    SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // Abandon all command pools.
    for (int i = 0; i < fActiveCommandPools.count(); ++i) {
        SkASSERT(fActiveCommandPools[i]->unique());
        fActiveCommandPools[i]->unrefAndAbandon();
    }
    fActiveCommandPools.reset();
    for (int i = 0; i < fAvailableCommandPools.count(); ++i) {
        SkASSERT(fAvailableCommandPools[i]->unique());
        fAvailableCommandPools[i]->unrefAndAbandon();
    }
    fAvailableCommandPools.reset();

    // Abandon all copy pipelines.
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unrefAndAbandon();
    }

    // Loop over all render pass sets to make sure we destroy all the internal VkRenderPasses.
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->abandon();
    }
    fDescriptorSetManagers.reset();

    // Release our uniform buffers.
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

void GrVkResourceProvider::backgroundReset(GrVkCommandPool* pool) {
    SkASSERT(pool->unique());
    pool->releaseResources(fGpu);
    SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
    if (taskGroup) {
        taskGroup->add([this, pool]() {
            this->reset(pool);
        });
    } else {
        this->reset(pool);
    }
}

void GrVkResourceProvider::reset(GrVkCommandPool* pool) {
    SkASSERT(pool->unique());
    pool->reset(fGpu);
    std::unique_lock<std::recursive_mutex> providerLock(fBackgroundMutex);
    fAvailableCommandPools.push_back(pool);
}
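
// fBackgroundMutex guards fAvailableCommandPools, since reset() may run on a SkTaskGroup thread
// (via backgroundReset) at the same time as findOrCreateCommandPool() runs on the main thread.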

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
                                                                    const GrVkGpu* gpu,
                                                                    const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
                                                            const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load-store
    // render pass on creation.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
                                                   const GrVkGpu* gpu,
                                                   const GrVkRenderPass::LoadStoreOps& colorOps,
                                                   const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}
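
// fLastReturnedIndex is a small optimization: the search above starts at the most recently
// returned pass, on the assumption that consecutive requests tend to reuse the same load-store
// configuration.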

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}