/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrTextureParams.h"
#include "GrVkCommandBuffer.h"
#include "GrVkGLSLSampler.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkSampler.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
GrVkResource::Trace GrVkResource::fTrace;
uint32_t GrVkResource::fKeyCounter = 0;
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

void GrVkResourceProvider::init() {
    VkPipelineCacheCreateInfo createInfo;
    memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.initialDataSize = 0;
    createInfo.pInitialData = nullptr;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                     &fPipelineCache));
    SkASSERT(VK_SUCCESS == result);
    if (VK_SUCCESS != result) {
        fPipelineCache = VK_NULL_HANDLE;
    }

    // Init uniform descriptor objects
    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPipeline& pipeline,
                                                   const GrPrimitiveProcessor& primProc,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   const GrVkRenderPass& renderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, pipeline, primProc, shaderStageInfo, shaderStageCount,
                                primitiveType, renderPass, layout, fPipelineCache);
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
        fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

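// Returns, and creates if necessary, a render pass with the requested load/store ops that is
// compatible with the given render target. Optionally reports back the handle of the compatible
// render pass set that was used.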
const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
                                                    const GrVkRenderTarget& target,
                                                    const GrVkRenderPass::LoadStoreOps& colorOps,
                                                    const GrVkRenderPass::LoadStoreOps& resolveOps,
                                                    const GrVkRenderPass::LoadStoreOps& stencilOps,
                                                    CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the specific
    // GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, resolveOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& resolveOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   resolveOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

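// Note: descriptor pools are not currently recycled; each call simply allocates a new
// GrVkDescriptorPool of the requested type and size.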
GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
                                                        VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

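// Samplers are cached in fSamplers, keyed on the texture params and mip level count. The returned
// sampler is ref'd for the caller.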
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(const GrTextureParams& params,
                                                                 uint32_t mipLevels) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, mipLevels));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, mipLevels);
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

sk_sp<GrVkPipelineState> GrVkResourceProvider::findOrCreateCompatiblePipelineState(
                                                             const GrPipeline& pipeline,
                                                             const GrPrimitiveProcessor& proc,
                                                             GrPrimitiveType primitiveType,
                                                             const GrVkRenderPass& renderPass) {
    return fPipelineStateCache->refPipelineState(pipeline, proc, primitiveType, renderPass);
}

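// Finds an existing GrVkDescriptorSetManager that can serve the uniform handler's combined
// image samplers, creating a new manager if none is compatible, and returns its handle.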
void GrVkResourceProvider::getSamplerDescriptorSetHandle(const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i].isCompatible(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                                   &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    fDescriptorSetManagers.emplace_back(fGpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                        &uniformHandler);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()].layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()].layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()].getDescriptorSet(fGpu,
                                                                               fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()].getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx].recycleDescriptorSet(descSet);
}

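// Primary command buffers are recycled: a finished buffer is popped from fAvailableCommandBuffers
// when possible, otherwise a new one is created. The returned buffer is ref'd and tracked in
// fActiveCommandBuffers until checkCommandBuffers() sees that the GPU has finished with it.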
GrVkPrimaryCommandBuffer* GrVkResourceProvider::findOrCreatePrimaryCommandBuffer() {
    GrVkPrimaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableCommandBuffers[count - 1];
        SkASSERT(cmdBuffer->finished(fGpu));
        fAvailableCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkPrimaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    fActiveCommandBuffers.push_back(cmdBuffer);
    cmdBuffer->ref();
    return cmdBuffer;
}

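// Moves any active command buffers that the GPU has finished with back onto the available list,
// resetting them so they can be reused by findOrCreatePrimaryCommandBuffer().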
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandBuffers.count() - 1; i >= 0; --i) {
        if (fActiveCommandBuffers[i]->finished(fGpu)) {
            GrVkPrimaryCommandBuffer* cmdBuffer = fActiveCommandBuffers[i];
            cmdBuffer->reset(fGpu);
            fAvailableCommandBuffers.push_back(cmdBuffer);
            fActiveCommandBuffers.removeShuffle(i);
        }
    }
}

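// Secondary command buffers are pooled the same way: reuse one from the available list if
// possible, otherwise create a new one. They are returned to the pool explicitly via
// recycleSecondaryCommandBuffer().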
GrVkSecondaryCommandBuffer* GrVkResourceProvider::findOrCreateSecondaryCommandBuffer() {
    GrVkSecondaryCommandBuffer* cmdBuffer = nullptr;
    int count = fAvailableSecondaryCommandBuffers.count();
    if (count > 0) {
        cmdBuffer = fAvailableSecondaryCommandBuffers[count - 1];
        fAvailableSecondaryCommandBuffers.removeShuffle(count - 1);
    } else {
        cmdBuffer = GrVkSecondaryCommandBuffer::Create(fGpu, fGpu->cmdPool());
    }
    return cmdBuffer;
}

void GrVkResourceProvider::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* cb) {
    cb->reset(fGpu);
    fAvailableSecondaryCommandBuffers.push_back(cb);
}

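// Standard-size uniform buffer resources are pooled as well; recycleStandardUniformBufferResource()
// returns them to fAvailableUniformBufferResources for reuse.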
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

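// destroyResources() unrefs every cached object with the GPU so the underlying Vulkan handles are
// freed; abandonResources() below drops the same references without making any Vulkan calls.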
void GrVkResourceProvider::destroyResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->reset(fGpu);
        fActiveCommandBuffers[i]->unref(fGpu);
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unref(fGpu);
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unref(fGpu);
    }
    fAvailableSecondaryCommandBuffers.reset();

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

void GrVkResourceProvider::abandonResources() {
    // release our active command buffers
    for (int i = 0; i < fActiveCommandBuffers.count(); ++i) {
        SkASSERT(fActiveCommandBuffers[i]->finished(fGpu));
        SkASSERT(fActiveCommandBuffers[i]->unique());
        fActiveCommandBuffers[i]->unrefAndAbandon();
    }
    fActiveCommandBuffers.reset();
    // release our available command buffers
    for (int i = 0; i < fAvailableCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableCommandBuffers[i]->finished(fGpu));
        SkASSERT(fAvailableCommandBuffers[i]->unique());
        fAvailableCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableCommandBuffers.reset();

    // release our available secondary command buffers
    for (int i = 0; i < fAvailableSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fAvailableSecondaryCommandBuffers[i]->unique());
        fAvailableSecondaryCommandBuffers[i]->unrefAndAbandon();
    }
    fAvailableSecondaryCommandBuffers.reset();

    // loop over all render pass sets to make sure we abandon all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, uint16_t>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i].abandon();
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
                                                                    const GrVkGpu* gpu,
                                                                    const GrVkRenderTarget& target)
    : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
                                                            const GrVkRenderTarget& target) const {
    // The first GrVkRenderpass should always exist since we create the basic load store
    // render pass on creation.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

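// The search below starts at fLastReturnedIndex, so repeated requests for the same load/store ops
// hit on the first iteration. If no stored render pass matches, a new one is built from the
// compatible render pass and appended.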
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
                                                   const GrVkGpu* gpu,
                                                   const GrVkRenderPass::LoadStoreOps& colorOps,
                                                   const GrVkRenderPass::LoadStoreOps& resolveOps,
                                                   const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, resolveOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, resolveOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(const GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}