blob: ccf47167c66294da3f477dff305e08493f36d8be [file] [log] [blame]
/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrVkResourceProvider.h"
9
egdaniel8b6394c2016-03-04 07:35:10 -080010#include "GrTextureParams.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050011#include "GrVkCommandBuffer.h"
12#include "GrVkPipeline.h"
egdaniel066df7c2016-06-08 14:02:27 -070013#include "GrVkRenderTarget.h"
egdaniel8b6394c2016-03-04 07:35:10 -080014#include "GrVkSampler.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050015#include "GrVkUtil.h"
16
#ifdef SK_TRACE_VK_RESOURCES
// Debug-only bookkeeping: a global trace list of live GrVkResources and a monotonically
// increasing counter used to give each resource a unique key.
GrVkResource::Trace GrVkResource::fTrace;
uint32_t GrVkResource::fKeyCounter = 0;
#endif
21
// Creates a provider for the given GPU. Only the pipeline-state cache is allocated here;
// the VkPipelineCache and the uniform descriptor objects are created later in init().
GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE)
    , fCurrentUniformDescCount(0) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}
28
GrVkResourceProvider::~GrVkResourceProvider() {
    // destroyResources()/abandonResources() must already have run: the render pass sets must be
    // empty and the VkPipelineCache handle already destroyed (or forgotten on abandon).
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}
34
egdaniel778555c2016-05-02 06:50:36 -070035void GrVkResourceProvider::initUniformDescObjects() {
36 // Create Uniform Buffer Descriptor
37 // The vertex uniform buffer will have binding 0 and the fragment binding 1.
38 VkDescriptorSetLayoutBinding dsUniBindings[2];
39 memset(&dsUniBindings, 0, 2 * sizeof(VkDescriptorSetLayoutBinding));
40 dsUniBindings[0].binding = GrVkUniformHandler::kVertexBinding;
41 dsUniBindings[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
42 dsUniBindings[0].descriptorCount = 1;
43 dsUniBindings[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
44 dsUniBindings[0].pImmutableSamplers = nullptr;
45 dsUniBindings[1].binding = GrVkUniformHandler::kFragBinding;
46 dsUniBindings[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
47 dsUniBindings[1].descriptorCount = 1;
48 dsUniBindings[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
49 dsUniBindings[1].pImmutableSamplers = nullptr;
50
51 VkDescriptorSetLayoutCreateInfo dsUniformLayoutCreateInfo;
52 memset(&dsUniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
53 dsUniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
54 dsUniformLayoutCreateInfo.pNext = nullptr;
55 dsUniformLayoutCreateInfo.flags = 0;
56 dsUniformLayoutCreateInfo.bindingCount = 2;
57 dsUniformLayoutCreateInfo.pBindings = dsUniBindings;
58
59 GR_VK_CALL_ERRCHECK(fGpu->vkInterface(), CreateDescriptorSetLayout(fGpu->device(),
60 &dsUniformLayoutCreateInfo,
61 nullptr,
62 &fUniformDescLayout));
egdaniela95220d2016-07-21 11:50:37 -070063
64 this->getDescSetHandle(0, fUniformDescLayout, &fUniformDSHandle);
egdaniel778555c2016-05-02 06:50:36 -070065}
66
jvanverth03509ea2016-03-02 13:19:47 -080067void GrVkResourceProvider::init() {
68 VkPipelineCacheCreateInfo createInfo;
69 memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
70 createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
71 createInfo.pNext = nullptr;
72 createInfo.flags = 0;
73 createInfo.initialDataSize = 0;
74 createInfo.pInitialData = nullptr;
75 VkResult result = GR_VK_CALL(fGpu->vkInterface(),
76 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
77 &fPipelineCache));
78 SkASSERT(VK_SUCCESS == result);
79 if (VK_SUCCESS != result) {
80 fPipelineCache = VK_NULL_HANDLE;
81 }
egdaniel778555c2016-05-02 06:50:36 -070082
83 this->initUniformDescObjects();
Greg Daniel164a9f02016-02-22 09:56:40 -050084}
85
// Builds a new GrVkPipeline for the given draw state. Creation goes through fPipelineCache so
// the Vulkan driver can reuse previously compiled pipeline state.
GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPipeline& pipeline,
                                                   const GrPrimitiveProcessor& primProc,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   const GrVkRenderPass& renderPass,
                                                   VkPipelineLayout layout) {

    return GrVkPipeline::Create(fGpu, pipeline, primProc, shaderStageInfo, shaderStageCount,
                                primitiveType, renderPass, layout, fPipelineCache);
}
97
98
99// To create framebuffers, we first need to create a simple RenderPass that is
halcanary9d524f22016-03-29 09:03:52 -0700100// only used for framebuffer creation. When we actually render we will create
Greg Daniel164a9f02016-02-22 09:56:40 -0500101// RenderPasses as needed that are compatible with the framebuffer.
halcanary9d524f22016-03-29 09:03:52 -0700102const GrVkRenderPass*
egdanield62e28b2016-06-07 08:43:30 -0700103GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
104 CompatibleRPHandle* compatibleHandle) {
105 for (int i = 0; i < fRenderPassArray.count(); ++i) {
106 if (fRenderPassArray[i].isCompatible(target)) {
107 const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500108 renderPass->ref();
egdanield62e28b2016-06-07 08:43:30 -0700109 if (compatibleHandle) {
110 *compatibleHandle = CompatibleRPHandle(i);
111 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500112 return renderPass;
113 }
114 }
115
egdanield62e28b2016-06-07 08:43:30 -0700116 const GrVkRenderPass* renderPass =
117 fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
118 renderPass->ref();
119
120 if (compatibleHandle) {
121 *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
122 }
123 return renderPass;
124}
125
126const GrVkRenderPass*
127GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
128 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
129 int index = compatibleHandle.toIndex();
130 const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500131 renderPass->ref();
132 return renderPass;
133}
134
egdaniel2feb0932016-06-08 06:48:09 -0700135const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
136 const GrVkRenderTarget& target,
137 const GrVkRenderPass::LoadStoreOps& colorOps,
138 const GrVkRenderPass::LoadStoreOps& resolveOps,
139 const GrVkRenderPass::LoadStoreOps& stencilOps,
140 CompatibleRPHandle* compatibleHandle) {
egdaniel066df7c2016-06-08 14:02:27 -0700141 GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
142 GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
143 : &tempRPHandle;
144 *pRPHandle = target.compatibleRenderPassHandle();
145
egdaniel2feb0932016-06-08 06:48:09 -0700146 // This will get us the handle to (and possible create) the compatible set for the specific
147 // GrVkRenderPass we are looking for.
148 this->findCompatibleRenderPass(target, compatibleHandle);
egdaniel066df7c2016-06-08 14:02:27 -0700149 return this->findRenderPass(*pRPHandle, colorOps, resolveOps, stencilOps);
egdaniel2feb0932016-06-08 06:48:09 -0700150}
151
152const GrVkRenderPass*
153GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
154 const GrVkRenderPass::LoadStoreOps& colorOps,
155 const GrVkRenderPass::LoadStoreOps& resolveOps,
156 const GrVkRenderPass::LoadStoreOps& stencilOps) {
157 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
158 CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
159 const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
160 colorOps,
161 resolveOps,
162 stencilOps);
163 renderPass->ref();
164 return renderPass;
165}
166
// Despite the name, pools are not currently reused: each request allocates a fresh
// GrVkDescriptorPool sized for 'count' descriptors of the given type.
GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
                                                            VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}
171
jvanverth62340062016-04-26 08:01:44 -0700172GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(const GrTextureParams& params,
173 uint32_t mipLevels) {
174 GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, mipLevels));
egdaniel8b6394c2016-03-04 07:35:10 -0800175 if (!sampler) {
jvanverth62340062016-04-26 08:01:44 -0700176 sampler = GrVkSampler::Create(fGpu, params, mipLevels);
egdaniel8b6394c2016-03-04 07:35:10 -0800177 fSamplers.add(sampler);
178 }
179 SkASSERT(sampler);
180 sampler->ref();
181 return sampler;
182}
183
// Returns a GrVkPipelineState (program plus pipeline) matching the given draw, creating and
// caching one if necessary. Lookup and creation are delegated to the pipeline-state cache.
sk_sp<GrVkPipelineState> GrVkResourceProvider::findOrCreateCompatiblePipelineState(
                                                                 const GrPipeline& pipeline,
                                                                 const GrPrimitiveProcessor& proc,
                                                                 GrPrimitiveType primitiveType,
                                                                 const GrVkRenderPass& renderPass) {
    return fPipelineStateCache->refPipelineState(pipeline, proc, primitiveType, renderPass);
}
191
egdaniel778555c2016-05-02 06:50:36 -0700192
egdaniela95220d2016-07-21 11:50:37 -0700193void GrVkResourceProvider::getDescSetHandle(uint32_t numSamplers, VkDescriptorSetLayout layout,
194 GrVkDescriptorSetManager::Handle* handle) {
195 SkASSERT(handle);
196 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
197 if (fDescriptorSetManagers[i].isCompatible(numSamplers)) {
198 *handle = GrVkDescriptorSetManager::Handle(i);
199 return;
200 }
201 }
202
203 // Failed to find a DescSetManager, we must create a new one;
204 VkDescriptorType type = numSamplers ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
205 : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
206
207 fDescriptorSetManagers.emplace_back(fGpu, layout, type, numSamplers);
208 *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
209}
210
// Hands out a uniform-buffer descriptor set from the uniform descriptor-set manager.
// initUniformDescObjects() must have run first so fUniformDSHandle is valid.
const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()].getDescriptorSet(fGpu,
                                                                               fUniformDSHandle);
}
216
217
218void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
219 const GrVkDescriptorSetManager::Handle& handle) {
220 SkASSERT(descSet);
221 SkASSERT(handle.isValid());
222 int managerIdx = handle.toIndex();
223 SkASSERT(managerIdx < fDescriptorSetManagers.count());
224 fDescriptorSetManagers[managerIdx].recycleDescriptorSet(descSet);
egdaniel778555c2016-05-02 06:50:36 -0700225}
226
jvanverth7ec92412016-07-06 09:24:57 -0700227GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
228 GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
229 int count = fAvailableCommandBuffers.count();
230 if (count > 0) {
egdaniela95220d2016-07-21 11:50:37 -0700231 cmdBuffer = fAvailableCommandBuffers[count - 1];
jvanverth7ec92412016-07-06 09:24:57 -0700232 SkASSERT(cmdBuffer->finished(fGpu));
233 fAvailableCommandBuffers.removeShuffle(count - 1);
234 } else {
235 cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
236 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500237 fActiveCommandBuffers.push_back(cmdBuffer);
238 cmdBuffer->ref();
239 return cmdBuffer;
240}
241
242void GrVkResourceProvider::checkCommandBuffers() {
243 for (int i = fActiveCommandBuffers.count()-1; i >= 0; --i) {
244 if (fActiveCommandBuffers[i]->finished(fGpu)) {
jvanverth7ec92412016-07-06 09:24:57 -0700245 GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
246 cmdBuffer->reset(fGpu);
247 fAvailableCommandBuffers.push_back(cmdBuffer);
Greg Daniel164a9f02016-02-22 09:56:40 -0500248 fActiveCommandBuffers.removeShuffle(i);
249 }
250 }
251}
252
jvanverth7ec92412016-07-06 09:24:57 -0700253GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
254 GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
255 int count = fAvailableSecondaryCommandBuffers.count();
256 if (count > 0) {
257 cmdBuffer = fAvailableSecondaryCommandBuffers[count-1];
258 fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
259 } else {
260 cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
261 }
262 return cmdBuffer;
263}
264
// Resets the secondary command buffer and returns it to the available pool for reuse.
void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}
269
// Releases every Vulkan resource this provider owns, in dependency order: command buffers,
// render pass sets, samplers, pipeline states, the pipeline cache, the uniform descriptor
// layout, and finally the descriptor-set managers. Must run before the destructor.
void GrVkResourceProvider::destroyResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all store GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    if (fUniformDescLayout) {
        GR_VK_CALL(fGpu->vkInterface(), DestroyDescriptorSetLayout(fGpu->device(),
                                                                   fUniformDescLayout,
                                                                   nullptr));
        fUniformDescLayout = VK_NULL_HANDLE;
    }

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].release(fGpu);
    }
    fDescriptorSetManagers.reset();
}
326
// Drops every resource after the context is abandoned. Mirrors destroyResources(), but uses
// unrefAndAbandon()/abandon() so nothing calls back into the (no longer usable) Vulkan device.
void GrVkResourceProvider::abandonResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all store GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    // The VkPipelineCache belongs to the lost device; just forget the handle.
    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].abandon();
    }
    fDescriptorSetManagers.reset();

}
egdanield62e28b2016-06-07 08:43:30 -0700375
376////////////////////////////////////////////////////////////////////////////////
377
// A CompatibleRenderPassSet always starts with the "simple" render pass for the target at
// index 0; variants with other load/store ops are appended on demand by getRenderPass().
GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
                                                                   const GrVkGpu* gpu,
                                                                   const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}
385
// A target is compatible with the whole set iff it is compatible with the set's simple
// render pass, since every pass in the set shares the same attachment layout.
bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
                                                             const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store
    // render pass when the set is constructed.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}
393
egdaniel2feb0932016-06-08 06:48:09 -0700394GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
395 const GrVkGpu* gpu,
396 const GrVkRenderPass::LoadStoreOps& colorOps,
397 const GrVkRenderPass::LoadStoreOps& resolveOps,
398 const GrVkRenderPass::LoadStoreOps& stencilOps) {
399 for (int i = 0; i < fRenderPasses.count(); ++i) {
400 int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
401 if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, resolveOps, stencilOps)) {
402 fLastReturnedIndex = idx;
403 return fRenderPasses[idx];
404 }
405 }
egdaniel9cb63402016-06-23 08:37:05 -0700406 GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
egdaniel2feb0932016-06-08 06:48:09 -0700407 renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, resolveOps, stencilOps);
408 fLastReturnedIndex = fRenderPasses.count() - 1;
409 return renderPass;
410}
411
egdanield62e28b2016-06-07 08:43:30 -0700412void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
413 for (int i = 0; i < fRenderPasses.count(); ++i) {
414 if (fRenderPasses[i]) {
415 fRenderPasses[i]->unref(gpu);
416 fRenderPasses[i] = nullptr;
417 }
418 }
419}
420
421void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
422 for (int i = 0; i < fRenderPasses.count(); ++i) {
423 if (fRenderPasses[i]) {
424 fRenderPasses[i]->unrefAndAbandon();
425 fRenderPasses[i] = nullptr;
426 }
427 }
428}