/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrSamplerState.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCopyPipeline.h"
#include "GrVkGpu.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkSampler.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
GrVkResource::Trace GrVkResource::fTrace;
uint32_t GrVkResource::fKeyCounter = 0;
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

void GrVkResourceProvider::init() {
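    // Note: the pipeline cache is created with no initial data, so it starts out empty and only
    // accumulates compiled pipeline state for the lifetime of this resource provider.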
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }

    // Init uniform descriptor objects
    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

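// Pipeline creation is delegated to GrVkPipeline::Create; the provider's contribution is the
// shared VkPipelineCache, which lets the driver reuse previously compiled pipeline state.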
GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPipeline& pipeline,
                                                   const GrStencilSettings& stencil,
                                                   const GrPrimitiveProcessor& primProc,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   const GrVkRenderPass& renderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, pipeline, stencil, primProc, shaderStageInfo,
                                shaderStageCount, primitiveType, renderPass, layout,
                                fPipelineCache);
}

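// Copy pipelines are cached and looked up by render pass compatibility with the destination.
// The returned pipeline has an extra ref() taken for the caller, who must balance it.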
GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
        const GrVkRenderTarget* dst,
        VkPipelineShaderStageCreateInfo* shaderStageInfo,
        VkPipelineLayout pipelineLayout) {
    // Find or Create a compatible pipeline
    GrVkCopyPipeline* pipeline = nullptr;
    for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
        if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
            pipeline = fCopyPipelines[i];
        }
    }
    if (!pipeline) {
        pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo, pipelineLayout,
                                            dst->numColorSamples(), *dst->simpleRenderPass(),
                                            fPipelineCache);
        if (!pipeline) {
            return nullptr;
        }
        fCopyPipelines.push_back(pipeline);
    }
    SkASSERT(pipeline);
    pipeline->ref();
    return pipeline;
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
            fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
        const GrVkRenderTarget& target,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps,
        CompatibleRPHandle* compatibleHandle) {
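    // If the caller did not ask for the compatible handle back, use a local temporary; we still
    // need a valid handle to look up the full render pass below.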
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the specific
    // GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu, colorOps, stencilOps);
    renderPass->ref();
    return renderPass;
}

GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
        VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

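// Samplers are cached in fSamplers, keyed on the sampler state plus the max mip level, and are
// returned with an extra ref() taken for the caller.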
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(const GrSamplerState& params,
                                                                 uint32_t maxMipLevel) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, maxMipLevel));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, maxMipLevel);
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        const GrPipeline& pipeline,
        const GrPrimitiveProcessor& proc,
        GrPrimitiveType primitiveType,
        const GrVkRenderPass& renderPass) {
    return fPipelineStateCache->refPipelineState(pipeline, proc, primitiveType, renderPass);
}

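// A GrVkDescriptorSetManager::Handle is simply an index into fDescriptorSetManagers. An existing
// manager is reused when it is compatible with the requested descriptor type and bindings;
// otherwise a new manager is appended and its index is returned.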
void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm =
            GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type, uniformHandler);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm =
            GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type, visibilities);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(
            fGpu, fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
}

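// Primary command buffers are pooled: a finished buffer is reused from the available list when
// possible, otherwise a new one is allocated from the gpu's command pool. The buffer is added to
// the active list and returned with an extra ref().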
GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
    GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableCommandBuffers[count - 1];
        SkASSERT(cmdBuffer->finished(fGpu));
        fAvailableCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    fActiveCommandBuffers.push_back(cmdBuffer);
    cmdBuffer->ref();
    return cmdBuffer;
}

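// Walks the active command buffers backwards (so removeShuffle() cannot skip an element) and
// moves any buffer the GPU has finished with back to the available list after resetting it.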
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandBuffers.count() - 1; i >= 0; --i) {
        if (fActiveCommandBuffers[i]->finished(fGpu)) {
            GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
            cmdBuffer->reset(fGpu);
            fAvailableCommandBuffers.push_back(cmdBuffer);
            fActiveCommandBuffers.removeShuffle(i);
        }
    }
}

GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
    GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableSecondaryCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableSecondaryCommandBuffers[count - 1];
        fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    return cmdBuffer;
}

void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}

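// Standard-size uniform buffer resources are recycled through a simple free list rather than
// being recreated each time; see recycleStandardUniformBufferResource() below.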
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

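// Releases everything this provider owns; unref(fGpu) frees the underlying Vulkan objects once
// the last reference is dropped. When deviceLost is true, the "finished" asserts on command
// buffers are relaxed since completion can no longer be assumed.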
void GrVkResourceProvider::destroyResources(bool deviceLost) {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(deviceLost || fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(deviceLost || fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Release all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unref(fGpu);
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

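// Unlike destroyResources(), abandonResources() drops all references via unrefAndAbandon()
// without calling back into Vulkan, so no device work is required during teardown.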
void GrVkResourceProvider::abandonResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Abandon all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unrefAndAbandon();
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->abandon();
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
        const GrVkGpu* gpu,
        const GrVkRenderTarget& target)
        : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
        const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store
    // render pass at creation time.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

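// The search below starts at fLastReturnedIndex, on the assumption that consecutive lookups tend
// to request the same load/store ops. On a miss, a new render pass is created from the set's
// compatible render pass and appended.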
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
        const GrVkGpu* gpu,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}
493}