/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrSamplerState.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCopyPipeline.h"
#include "GrVkGpu.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
std::atomic<uint32_t> GrVkResource::fKeyCounter{0};
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

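// Creates the VkPipelineCache shared by all pipeline creation and the descriptor set manager
// used for uniform buffer descriptors.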
void GrVkResourceProvider::init() {
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }

    // Init uniform descriptor objects
    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

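// Builds a new GrVkPipeline each call; the only reuse at this level comes from the driver-side
// VkPipelineCache passed to GrVkPipeline::Create.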
GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPrimitiveProcessor& primProc,
                                                   const GrPipeline& pipeline,
                                                   const GrStencilSettings& stencil,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   VkRenderPass compatibleRenderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, primProc, pipeline, stencil, shaderStageInfo,
                                shaderStageCount, primitiveType, compatibleRenderPass, layout,
                                fPipelineCache);
}

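// Copy pipelines are keyed solely on render pass compatibility, so the small fCopyPipelines
// array is searched linearly before a new pipeline is created.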
GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
        const GrVkRenderTarget* dst,
        VkPipelineShaderStageCreateInfo* shaderStageInfo,
        VkPipelineLayout pipelineLayout) {
    // Find or create a compatible pipeline
    GrVkCopyPipeline* pipeline = nullptr;
    for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
        if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
            pipeline = fCopyPipelines[i];
        }
    }
    if (!pipeline) {
        pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
                                            pipelineLayout,
                                            dst->numColorSamples(),
                                            *dst->simpleRenderPass(),
                                            fPipelineCache);
        if (!pipeline) {
            return nullptr;
        }
        fCopyPipelines.push_back(pipeline);
    }
    SkASSERT(pipeline);
    pipeline->ref();
    return pipeline;
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
            fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

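// Unlike findCompatibleRenderPass(), the two lookups below return a render pass whose color and
// stencil load/store ops match exactly, creating one within the compatible set if needed.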
const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
        const GrVkRenderTarget& target,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps,
        CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the specific
    // GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
        VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

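// Samplers are cached in a hash keyed on the sampler state and any Ycbcr conversion info; the
// returned sampler carries a ref that the caller is responsible for releasing.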
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
        const GrSamplerState& params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
        if (!sampler) {
            return nullptr;
        }
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
        const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSamplerYcbcrConversion* ycbcrConversion =
            fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
    if (!ycbcrConversion) {
        ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
        fYcbcrConversions.add(ycbcrConversion);
    }
    SkASSERT(ycbcrConversion);
    ycbcrConversion->ref();
    return ycbcrConversion;
}

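// Pipeline state lookup is delegated to fPipelineStateCache, which reuses a matching
// GrVkPipelineState where possible and otherwise builds a new one.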
GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        const GrPipeline& pipeline, const GrPrimitiveProcessor& proc,
        const GrTextureProxy* const primProcProxies[], GrPrimitiveType primitiveType,
        VkRenderPass compatibleRenderPass) {
    return fPipelineStateCache->refPipelineState(proc, primProcProxies, pipeline, primitiveType,
                                                 compatibleRenderPass);
}

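// The two overloads below hand back a handle to a descriptor set manager compatible with the
// requested sampler or texel buffer layout, creating a new manager only when none matches.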
void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   uniformHandler);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   visibilities);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
                                                                                fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
}

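// Primary command buffers are recycled through fAvailableCommandBuffers; a buffer handed out
// here stays in fActiveCommandBuffers until checkCommandBuffers() sees that the GPU is done
// with it.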
GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
    GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableCommandBuffers[count - 1];
        SkASSERT(cmdBuffer->finished(fGpu));
        fAvailableCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    fActiveCommandBuffers.push_back(cmdBuffer);
    cmdBuffer->ref();
    return cmdBuffer;
}

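// Scans the active primary command buffers, resets any whose work the GPU has finished, and
// moves them to the available list for reuse.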
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandBuffers.count() - 1; i >= 0; --i) {
        if (fActiveCommandBuffers[i]->finished(fGpu)) {
            GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
            cmdBuffer->reset(fGpu);
            fAvailableCommandBuffers.push_back(cmdBuffer);
            fActiveCommandBuffers.removeShuffle(i);
        }
    }
}

GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
    GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableSecondaryCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableSecondaryCommandBuffers[count - 1];
        fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    return cmdBuffer;
}

void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}

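// Backing resources for standard-sized uniform buffers are pooled so new uniform buffers can
// reuse previously allocated memory instead of allocating fresh buffers each time.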
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

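// Releases every cached object and frees the underlying Vulkan resources. When deviceLost is
// true, command buffers are not required to have finished executing on the GPU.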
void GrVkResourceProvider::destroyResources(bool deviceLost) {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(deviceLost || fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(deviceLost || fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Release all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unref(fGpu);
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

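// Drops every cached object without making any Vulkan calls, for use once the underlying device
// or context is no longer valid.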
void GrVkResourceProvider::abandonResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Abandon all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unrefAndAbandon();
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->abandon();
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
        const GrVkGpu* gpu,
        const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
        const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store render
    // pass at creation.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

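// Searches the set for a render pass with matching load/store ops, starting from the index that
// satisfied the previous lookup, and appends a new render pass if none matches.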
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
        const GrVkGpu* gpu,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}