/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrTextureParams.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCopyPipeline.h"
#include "GrVkGLSLSampler.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkSampler.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
GrVkResource::Trace GrVkResource::fTrace;
uint32_t GrVkResource::fKeyCounter = 0;
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

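// Sets up the VkPipelineCache used for all pipeline creation and the descriptor set
// manager that hands out uniform-buffer descriptor sets.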
void GrVkResourceProvider::init() {
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }

    // Init uniform descriptor objects
    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

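// Each call builds a new GrVkPipeline; the shared VkPipelineCache is passed down so the
// driver can reuse previously compiled pipeline state where possible.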
GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPipeline& pipeline,
                                                   const GrStencilSettings& stencil,
                                                   const GrPrimitiveProcessor& primProc,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   const GrVkRenderPass& renderPass,
                                                   VkPipelineLayout layout) {

    return GrVkPipeline::Create(fGpu, pipeline, stencil, primProc, shaderStageInfo,
                                shaderStageCount, primitiveType, renderPass, layout,
                                fPipelineCache);
}

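// Returns a ref'ed copy pipeline that is compatible with the destination's simple render
// pass, creating and caching a new one if no existing pipeline matches.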
GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
                                                    const GrVkRenderTarget* dst,
                                                    VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                    VkPipelineLayout pipelineLayout) {
    // Find or Create a compatible pipeline
    GrVkCopyPipeline* pipeline = nullptr;
    for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
        if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
            pipeline = fCopyPipelines[i];
        }
    }
    if (!pipeline) {
        pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
                                            pipelineLayout,
                                            dst->numColorSamples(),
                                            *dst->simpleRenderPass(),
                                            fPipelineCache);
        fCopyPipelines.push_back(pipeline);
    }
    SkASSERT(pipeline);
    pipeline->ref();
    return pipeline;
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
        fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

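// Returns a ref'ed render pass with the requested color and stencil load/store ops that
// is compatible with the target, creating the compatible set and/or the specific render
// pass if it does not exist yet.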
const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
                                                     const GrVkRenderTarget& target,
                                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                                     const GrVkRenderPass::LoadStoreOps& stencilOps,
                                                     CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the
    // specific GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

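// Descriptor pools are not recycled at this level; each call simply allocates a new pool
// for the requested descriptor type and count.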
GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
                                                            VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

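// Samplers are cached in a hash keyed on the texture params and mip level count; the
// returned sampler is ref'ed for the caller.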
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(const GrTextureParams& params,
                                                                 uint32_t mipLevels) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, mipLevels));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, mipLevels);
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

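// Pipeline states are looked up in (and ref'ed from) fPipelineStateCache using the
// pipeline, primitive processor, primitive type, and render pass.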
sk_sp<GrVkPipelineState> GrVkResourceProvider::findOrCreateCompatiblePipelineState(
                                                                 const GrPipeline& pipeline,
                                                                 const GrPrimitiveProcessor& proc,
                                                                 GrPrimitiveType primitiveType,
                                                                 const GrVkRenderPass& renderPass) {
    return fPipelineStateCache->refPipelineState(pipeline, proc, primitiveType, renderPass);
}

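// Both overloads below return the handle of a descriptor set manager whose combined
// image/sampler layout is compatible with the request, creating a new manager if none of
// the existing ones match.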
void GrVkResourceProvider::getSamplerDescriptorSetHandle(const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i].isCompatible(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                                   &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                        &uniformHandler);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i].isCompatible(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                                   visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                        visibilities);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()].layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()].layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()].getDescriptorSet(fGpu,
                                                                               fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()].getDescriptorSet(fGpu, handle);
}

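// Returns a descriptor set to the manager it was allocated from so it can be reused.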
void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx].recycleDescriptorSet(descSet);
}

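// Primary command buffers are recycled: reuse a finished one from the available list if
// possible, otherwise allocate a new one from the gpu's command pool. The returned buffer
// is ref'ed and tracked in fActiveCommandBuffers until checkCommandBuffers() sees that it
// has finished.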
GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
    GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableCommandBuffers[count - 1];
        SkASSERT(cmdBuffer->finished(fGpu));
        fAvailableCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    fActiveCommandBuffers.push_back(cmdBuffer);
    cmdBuffer->ref();
    return cmdBuffer;
}

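// Moves any active command buffers that have finished executing back onto the available
// list after resetting them.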
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandBuffers.count() - 1; i >= 0; --i) {
        if (fActiveCommandBuffers[i]->finished(fGpu)) {
            GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
            cmdBuffer->reset(fGpu);
            fAvailableCommandBuffers.push_back(cmdBuffer);
            fActiveCommandBuffers.removeShuffle(i);
        }
    }
}

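// Secondary command buffers follow the same recycle-or-create pattern, but they are not
// tracked as active here; callers hand them back via recycleSecondaryCommandBuffer().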
GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
    GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableSecondaryCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableSecondaryCommandBuffers[count - 1];
        fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    return cmdBuffer;
}

void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}

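// Standard-size uniform buffer resources are pooled: reuse one from the free list when
// available, otherwise create a new kStandardSize resource.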
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

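// destroyResources() unrefs everything against the device so the underlying Vulkan
// objects are freed; abandonResources() below drops the references without making any
// Vulkan calls.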
void GrVkResourceProvider::destroyResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Release all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unref(fGpu);
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

void GrVkResourceProvider::abandonResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // Abandon all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unrefAndAbandon();
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].abandon();
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

////////////////////////////////////////////////////////////////////////////////

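// A CompatibleRenderPassSet holds render passes that are all compatible with one another:
// index 0 is the simple render pass used for framebuffer creation, and subsequent entries
// differ only in their load/store ops.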
GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
                                                                     const GrVkGpu* gpu,
                                                                     const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
                                                          const GrVkRenderTarget& target) const {
    // The first GrVkRenderpass should always exist since we create the basic load store
    // render pass on create
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

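// Searches the set starting at the most recently returned pass; if no existing pass has
// the requested load/store ops, a new one is created from the compatible render pass.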
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
                                                   const GrVkGpu* gpu,
                                                   const GrVkRenderPass::LoadStoreOps& colorOps,
                                                   const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}