/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrSamplerState.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCopyPipeline.h"
#include "GrVkGpu.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkSampler.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
uint32_t GrVkResource::fKeyCounter = 0;
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

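// Creates the VkPipelineCache shared by all pipeline construction and sets up the descriptor
// set manager that handles uniform buffer descriptor sets. If cache creation fails, the cache
// handle is left as VK_NULL_HANDLE and pipelines are simply built without one.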
void GrVkResourceProvider::init() {
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }

    // Init uniform descriptor objects
    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

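// Builds a new GrVkPipeline for the given draw state. The shared VkPipelineCache is threaded
// through so the driver can reuse previously compiled pipeline state where possible.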
GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPrimitiveProcessor& primProc,
                                                   const GrPipeline& pipeline,
                                                   const GrStencilSettings& stencil,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   const GrVkRenderPass& renderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, primProc, pipeline, stencil, shaderStageInfo,
                                shaderStageCount, primitiveType, renderPass, layout,
                                fPipelineCache);
}

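// Returns a ref'ed copy pipeline that is compatible with the destination's simple render pass,
// creating and caching a new one if no compatible pipeline exists yet.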
GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
        const GrVkRenderTarget* dst,
        VkPipelineShaderStageCreateInfo* shaderStageInfo,
        VkPipelineLayout pipelineLayout) {
    // Find or Create a compatible pipeline
    GrVkCopyPipeline* pipeline = nullptr;
    for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
        if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
            pipeline = fCopyPipelines[i];
        }
    }
    if (!pipeline) {
        pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
                                            pipelineLayout,
                                            dst->numColorSamples(),
                                            *dst->simpleRenderPass(),
                                            fPipelineCache);
        if (!pipeline) {
            return nullptr;
        }
        fCopyPipelines.push_back(pipeline);
    }
    SkASSERT(pipeline);
    pipeline->ref();
    return pipeline;
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
            fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

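// Returns a ref'ed render pass compatible with 'target' that uses the requested color and
// stencil load/store ops, creating the compatible set and/or the render pass if needed.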
const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
                                                     const GrVkRenderTarget& target,
                                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                                     const GrVkRenderPass::LoadStoreOps& stencilOps,
                                                     CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the specific
    // GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

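// Note: despite the name, this currently always allocates a new GrVkDescriptorPool of the
// requested descriptor type and size.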
GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
                                                            VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

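// Samplers are cached in fSamplers, keyed on the sampler state and max mip level. The returned
// sampler carries an extra ref that the caller owns.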
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(const GrSamplerState& params,
                                                                 uint32_t maxMipLevel) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, maxMipLevel));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, maxMipLevel);
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

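// Fetches (or builds) a GrVkPipelineState from the pipeline state cache for this combination of
// pipeline, primitive processor, primitive type, and render pass.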
GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
                                                                 const GrPipeline& pipeline,
                                                                 const GrPrimitiveProcessor& proc,
                                                                 GrPrimitiveType primitiveType,
                                                                 const GrVkRenderPass& renderPass) {
    return fPipelineStateCache->refPipelineState(proc, pipeline, primitiveType, renderPass);
}

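// The two overloads below return the handle of a GrVkDescriptorSetManager compatible with the
// requested sampler/texel-buffer descriptor layout, creating a new manager if none matches.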
void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   uniformHandler);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   visibilities);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
                                                                                fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
}

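// Primary command buffers are pooled: take a finished one from fAvailableCommandBuffers when
// possible, otherwise create a new one. The returned buffer is ref'ed and tracked in
// fActiveCommandBuffers.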
GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
    GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableCommandBuffers[count - 1];
        SkASSERT(cmdBuffer->finished(fGpu));
        fAvailableCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    fActiveCommandBuffers.push_back(cmdBuffer);
    cmdBuffer->ref();
    return cmdBuffer;
}

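// Moves any active primary command buffer whose GPU work has completed back to the available
// pool after resetting it.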
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandBuffers.count()-1; i >= 0; --i) {
        if (fActiveCommandBuffers[i]->finished(fGpu)) {
            GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
            cmdBuffer->reset(fGpu);
            fAvailableCommandBuffers.push_back(cmdBuffer);
            fActiveCommandBuffers.removeShuffle(i);
        }
    }
}

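// Secondary command buffers are pooled the same way; callers hand them back through
// recycleSecondaryCommandBuffer() below.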
GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
    GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableSecondaryCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableSecondaryCommandBuffers[count-1];
        fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    return cmdBuffer;
}

void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}

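// The GrVkResource backing a standard-size uniform buffer is pooled as well, so the underlying
// buffer allocation can be reused.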
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

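// Frees every Vulkan object the provider owns. When 'deviceLost' is true the asserts that each
// command buffer has finished are relaxed, presumably because a lost device may never report
// completion.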
void GrVkResourceProvider::destroyResources(bool deviceLost) {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(deviceLost || fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(deviceLost || fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Release all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unref(fGpu);
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

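// Abandon path: drop our CPU-side refs via unrefAndAbandon() without making any further Vulkan
// calls, for use once the context has been abandoned.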
void GrVkResourceProvider::abandonResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Abandon all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unrefAndAbandon();
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->abandon();
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

////////////////////////////////////////////////////////////////////////////////

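// CompatibleRenderPassSet groups render passes that are compatible with one another; index 0
// always holds the simple render pass used for framebuffer creation and compatibility checks.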
GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
                                                                     const GrVkGpu* gpu,
                                                                     const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
                                                          const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load store
    // render pass on create
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

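// Returns the render pass in this set that matches the requested load/store ops, starting the
// search at the most recently returned index; appends a new render pass if none matches.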
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
                                                   const GrVkGpu* gpu,
                                                   const GrVkRenderPass::LoadStoreOps& colorOps,
                                                   const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}