/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrTextureParams.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCopyPipeline.h"
#include "GrVkGLSLSampler.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkSampler.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
GrVkResource::Trace GrVkResource::fTrace;
uint32_t GrVkResource::fKeyCounter = 0;
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

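// Creates the VkPipelineCache used for pipeline creation in this provider and sets up the
// descriptor set manager for uniform buffers. If pipeline cache creation fails we keep
// fPipelineCache as VK_NULL_HANDLE rather than aborting.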
void GrVkResourceProvider::init() {
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }

    // Init uniform descriptor objects
    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPipeline& pipeline,
                                                   const GrPrimitiveProcessor& primProc,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   const GrVkRenderPass& renderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, pipeline, primProc, shaderStageInfo, shaderStageCount,
                                primitiveType, renderPass, layout, fPipelineCache);
}

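// Returns a copy pipeline whose render pass is compatible with the destination target's simple
// render pass, creating and caching a new one if no cached pipeline matches. The returned
// pipeline is ref'ed for the caller.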
GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
                                                    const GrVkRenderTarget* dst,
                                                    VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                    VkPipelineLayout pipelineLayout) {
    // Find or Create a compatible pipeline
    GrVkCopyPipeline* pipeline = nullptr;
    for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
        if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
            pipeline = fCopyPipelines[i];
        }
    }
    if (!pipeline) {
        pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
                                            pipelineLayout,
                                            dst->numColorSamples(),
                                            *dst->simpleRenderPass(),
                                            fPipelineCache);
        fCopyPipelines.push_back(pipeline);
    }
    SkASSERT(pipeline);
    pipeline->ref();
    return pipeline;
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
        fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

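// Returns a render pass (ref'ed for the caller) from the target's compatible set that uses the
// requested color and stencil load/store ops. If the caller provides a CompatibleRPHandle it is
// filled in so later lookups can skip the target.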
const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
                                                     const GrVkRenderTarget& target,
                                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                                     const GrVkRenderPass::LoadStoreOps& stencilOps,
                                                     CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the specific
    // GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
                                                            VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

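// Samplers are cached in fSamplers keyed on the texture params and mip level count. A matching
// sampler is reused; otherwise a new one is created and added to the hash. The returned sampler
// is ref'ed for the caller.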
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(const GrTextureParams& params,
                                                                 uint32_t mipLevels) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, mipLevels));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, mipLevels);
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

sk_sp<GrVkPipelineState> GrVkResourceProvider::findOrCreateCompatiblePipelineState(
                                                                 const GrPipeline& pipeline,
                                                                 const GrPrimitiveProcessor& proc,
                                                                 GrPrimitiveType primitiveType,
                                                                 const GrVkRenderPass& renderPass) {
    return fPipelineStateCache->refPipelineState(pipeline, proc, primitiveType, renderPass);
}

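// Finds an existing GrVkDescriptorSetManager for combined image samplers that is compatible with
// the given uniform handler (or visibilities, in the overload below), creating one if needed, and
// returns its handle, which is an index into fDescriptorSetManagers.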
void GrVkResourceProvider::getSamplerDescriptorSetHandle(const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i].isCompatible(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                                   &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                        &uniformHandler);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i].isCompatible(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                                   visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                        visibilities);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()].layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()].layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()].getDescriptorSet(fGpu,
                                                                               fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()].getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx].recycleDescriptorSet(descSet);
}

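// Reuses a finished primary command buffer from the available pool when possible; otherwise a new
// one is created. The returned buffer is ref'ed and tracked in fActiveCommandBuffers.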
GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
    GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableCommandBuffers[count - 1];
        SkASSERT(cmdBuffer->finished(fGpu));
        fAvailableCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    fActiveCommandBuffers.push_back(cmdBuffer);
    cmdBuffer->ref();
    return cmdBuffer;
}

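// Moves any active command buffers that the GPU has finished executing back to the available
// pool, resetting them for reuse.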
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandBuffers.count() - 1; i >= 0; --i) {
        if (fActiveCommandBuffers[i]->finished(fGpu)) {
            GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
            cmdBuffer->reset(fGpu);
            fAvailableCommandBuffers.push_back(cmdBuffer);
            fActiveCommandBuffers.removeShuffle(i);
        }
    }
}

GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
    GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableSecondaryCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableSecondaryCommandBuffers[count - 1];
        fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    return cmdBuffer;
}

void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}

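// Standard-size uniform buffer resources are recycled through fAvailableUniformBufferResources so
// they do not have to be reallocated on every use.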
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

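// Frees all cached resources. All command buffers are expected to have finished executing
// (asserted below) before their Vulkan objects are released.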
void GrVkResourceProvider::destroyResources() {
    // Release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // Release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // Release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Release all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unref(fGpu);
    }

    // Loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // Release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

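// Like destroyResources(), but uses unrefAndAbandon()/abandon() so no Vulkan calls are made to
// free the underlying objects.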
void GrVkResourceProvider::abandonResources() {
    // Release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // Release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // Release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Abandon all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unrefAndAbandon();
    }

    // Loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].abandon();
    }
    fDescriptorSetManagers.reset();

    // Release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
                                                                     const GrVkGpu* gpu,
                                                                     const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
                                                             const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store
    // render pass when the set is created.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

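// Searches the set for a render pass with matching load/store ops, starting from the most
// recently returned index; if none matches, a new render pass is created from the compatible one
// and appended to the set.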
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
                                                   const GrVkGpu* gpu,
                                                   const GrVkRenderPass::LoadStoreOps& colorOps,
                                                   const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}