/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrTextureParams.h"
#include "GrVkCommandBuffer.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkSampler.h"
#include "GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
GrVkResource::Trace GrVkResource::fTrace;
uint32_t GrVkResource::fKeyCounter = 0;
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE)
    , fUniformDescPool(nullptr)
    , fCurrentUniformDescCount(0) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

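// Builds the descriptor set layout shared by every uniform-buffer descriptor set: binding 0 holds
// the vertex-stage uniform buffer and binding 1 the fragment-stage uniform buffer. It also creates
// the initial uniform descriptor pool, sized for kStartNumUniformDescriptors descriptors.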
void GrVkResourceProvider::initUniformDescObjects() {
    // Create Uniform Buffer Descriptor
    // The vertex uniform buffer will have binding 0 and the fragment binding 1.
    VkDescriptorSetLayoutBinding dsUniBindings[2];
    memset(&dsUniBindings, 0, 2 * sizeof(VkDescriptorSetLayoutBinding));
    dsUniBindings[0].binding = GrVkUniformHandler::kVertexBinding;
    dsUniBindings[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsUniBindings[0].descriptorCount = 1;
    dsUniBindings[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
    dsUniBindings[0].pImmutableSamplers = nullptr;
    dsUniBindings[1].binding = GrVkUniformHandler::kFragBinding;
    dsUniBindings[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsUniBindings[1].descriptorCount = 1;
    dsUniBindings[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsUniBindings[1].pImmutableSamplers = nullptr;

    VkDescriptorSetLayoutCreateInfo dsUniformLayoutCreateInfo;
    memset(&dsUniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
    dsUniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    dsUniformLayoutCreateInfo.pNext = nullptr;
    dsUniformLayoutCreateInfo.flags = 0;
    dsUniformLayoutCreateInfo.bindingCount = 2;
    dsUniformLayoutCreateInfo.pBindings = dsUniBindings;

    GR_VK_CALL_ERRCHECK(fGpu->vkInterface(), CreateDescriptorSetLayout(fGpu->device(),
                                                                       &dsUniformLayoutCreateInfo,
                                                                       nullptr,
                                                                       &fUniformDescLayout));
    fCurrMaxUniDescriptors = kStartNumUniformDescriptors;
    fUniformDescPool = this->findOrCreateCompatibleDescriptorPool(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                                                                  fCurrMaxUniDescriptors);
}

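// Creates the VkPipelineCache used for all pipeline creation (with no preloaded data; the handle
// is reset to VK_NULL_HANDLE on failure), then sets up the uniform descriptor layout and pool.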
void GrVkResourceProvider::init() {
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }

    this->initUniformDescObjects();
}

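// Thin wrapper around GrVkPipeline::Create() that supplies the shared VkPipelineCache so the
// driver can reuse previous pipeline compilation work where possible.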
GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPipeline& pipeline,
                                                   const GrPrimitiveProcessor& primProc,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   const GrVkRenderPass& renderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, pipeline, primProc, shaderStageInfo, shaderStageCount,
                                primitiveType, renderPass, layout, fPipelineCache);
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
        fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

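// Looks up the compatible render pass for a handle returned by the overload above; the handle
// must refer to an existing entry in fRenderPassArray. The returned render pass is ref'd for the
// caller.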
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

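// Returns a render pass that is compatible with the target and uses the requested load/store ops,
// creating one if necessary. If compatibleHandle is non-null it receives the handle of the
// compatible render pass set that the returned render pass belongs to.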
const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
                                                     const GrVkRenderTarget& target,
                                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                                     const GrVkRenderPass::LoadStoreOps& resolveOps,
                                                     const GrVkRenderPass::LoadStoreOps& stencilOps,
                                                     CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the specific
    // GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, resolveOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& resolveOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   resolveOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

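// Despite the "findOrCreate" name, this currently always allocates a fresh GrVkDescriptorPool for
// the requested descriptor type and count; no pooling or reuse happens at this level.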
GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
                                                           VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

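// Samplers are cached in fSamplers, keyed on the texture params and mip level count. A cache hit
// is ref'd and returned; otherwise a new GrVkSampler is created and added to the hash.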
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(const GrTextureParams& params,
                                                                 uint32_t mipLevels) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, mipLevels));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, mipLevels);
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

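// Delegates to the PipelineStateCache, which hands back a GrVkPipelineState matching the given
// pipeline, primitive processor, primitive type, and render pass.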
sk_sp<GrVkPipelineState> GrVkResourceProvider::findOrCreateCompatiblePipelineState(
                                                               const GrPipeline& pipeline,
                                                               const GrPrimitiveProcessor& proc,
                                                               GrPrimitiveType primitiveType,
                                                               const GrVkRenderPass& renderPass) {
    return fPipelineStateCache->refPipelineState(pipeline, proc, primitiveType, renderPass);
}

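// Allocates a uniform descriptor set out of the current uniform descriptor pool. When the pool
// would be exhausted, the old pool is unref'd and replaced with one roughly 50% larger, capped at
// kMaxUniformDescriptors. *outPool receives the pool the set was allocated from.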
void GrVkResourceProvider::getUniformDescriptorSet(VkDescriptorSet* ds,
                                                   const GrVkDescriptorPool** outPool) {
    fCurrentUniformDescCount += kNumUniformDescPerSet;
    if (fCurrentUniformDescCount > fCurrMaxUniDescriptors) {
        fUniformDescPool->unref(fGpu);
        uint32_t newPoolSize = fCurrMaxUniDescriptors + ((fCurrMaxUniDescriptors + 1) >> 1);
        if (newPoolSize < kMaxUniformDescriptors) {
            fCurrMaxUniDescriptors = newPoolSize;
        } else {
            fCurrMaxUniDescriptors = kMaxUniformDescriptors;
        }
        fUniformDescPool =
            this->findOrCreateCompatibleDescriptorPool(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                                                       fCurrMaxUniDescriptors);
        fCurrentUniformDescCount = kNumUniformDescPerSet;
    }
    SkASSERT(fUniformDescPool);

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fUniformDescPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fUniformDescLayout;
    GR_VK_CALL_ERRCHECK(fGpu->vkInterface(), AllocateDescriptorSets(fGpu->device(),
                                                                    &dsAllocateInfo,
                                                                    ds));
    *outPool = fUniformDescPool;
}

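// Primary command buffers are recycled: if a finished buffer is sitting in
// fAvailableCommandBuffers it is reused, otherwise a new one is created from the gpu's command
// pool. The returned buffer is tracked in fActiveCommandBuffers and ref'd for the caller.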
GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
    GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableCommandBuffers[count - 1];
        SkASSERT(cmdBuffer->finished(fGpu));
        fAvailableCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    fActiveCommandBuffers.push_back(cmdBuffer);
    cmdBuffer->ref();
    return cmdBuffer;
}

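// Scans the active primary command buffers and moves any that the GPU has finished with back to
// the available list after resetting them.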
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandBuffers.count() - 1; i >= 0; --i) {
        if (fActiveCommandBuffers[i]->finished(fGpu)) {
            GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
            cmdBuffer->reset(fGpu);
            fAvailableCommandBuffers.push_back(cmdBuffer);
            fActiveCommandBuffers.removeShuffle(i);
        }
    }
}

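// Secondary command buffers follow the same recycling pattern: reuse one from the available list
// if possible, otherwise create a new one. recycleSecondaryCommandBuffer() resets a buffer and
// returns it to that list.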
GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
    GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableSecondaryCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableSecondaryCommandBuffers[count - 1];
        fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    return cmdBuffer;
}

void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}

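// Releases every cached Vulkan object back to the driver. This is the orderly teardown path: the
// device is still valid, so command buffers, render passes, samplers, the pipeline state cache,
// the pipeline cache, and the uniform descriptor objects are all destroyed explicitly.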
void GrVkResourceProvider::destroyResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    if (fUniformDescLayout) {
        GR_VK_CALL(fGpu->vkInterface(), DestroyDescriptorSetLayout(fGpu->device(),
                                                                   fUniformDescLayout,
                                                                   nullptr));
        fUniformDescLayout = VK_NULL_HANDLE;
    }
    fUniformDescPool->unref(fGpu);
}

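// Mirror of destroyResources() for when the backend context is being abandoned: the same
// bookkeeping is cleared, but Vulkan handles are dropped via unrefAndAbandon() rather than
// destroyed through the device.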
void GrVkResourceProvider::abandonResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    fUniformDescLayout = VK_NULL_HANDLE;
    fUniformDescPool->unrefAndAbandon();
}

////////////////////////////////////////////////////////////////////////////////

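// A CompatibleRenderPassSet holds all of the render passes that are compatible with one another.
// Entry 0 is the simple render pass created from the target and used only for compatibility
// checks and framebuffer creation; later entries add specific load/store ops.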
GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
                                                                   const GrVkGpu* gpu,
                                                                   const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
                                                           const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store
    // render pass on creation.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

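// Returns the render pass in this set that uses the requested load/store ops, creating and
// appending one if none matches. The search starts at fLastReturnedIndex so that repeated
// requests for the same ops hit on the first comparison.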
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
                                                   const GrVkGpu* gpu,
                                                   const GrVkRenderPass::LoadStoreOps& colorOps,
                                                   const GrVkRenderPass::LoadStoreOps& resolveOps,
                                                   const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, resolveOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, resolveOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}