/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkResourceProvider.h"

#include "GrContextPriv.h"
#include "GrSamplerState.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCommandPool.h"
#include "GrVkCopyPipeline.h"
#include "GrVkGpu.h"
#include "GrVkPipeline.h"
#include "GrVkRenderTarget.h"
#include "GrVkUniformBuffer.h"
#include "GrVkUtil.h"
#include "SkTaskGroup.h"

#ifdef SK_TRACE_VK_RESOURCES
std::atomic<uint32_t> GrVkResource::fKeyCounter{0};
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(0 == fExternalRenderPasses.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

VkPipelineCache GrVkResourceProvider::pipelineCache() {
    if (fPipelineCache == VK_NULL_HANDLE) {
        VkPipelineCacheCreateInfo createInfo;
        memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
        createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;

        auto persistentCache = fGpu->getContext()->contextPriv().getPersistentCache();
        sk_sp<SkData> cached;
        if (persistentCache) {
            uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
            sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));
            cached = persistentCache->load(*keyData);
        }
        bool usedCached = false;
        if (cached) {
            uint32_t* cacheHeader = (uint32_t*)cached->data();
            if (cacheHeader[1] == VK_PIPELINE_CACHE_HEADER_VERSION_ONE) {
                // For version one of the header, the total header size is 16 bytes plus
                // VK_UUID_SIZE bytes. See section 9.6 (Pipeline Cache) of the Vulkan spec
                // for the breakdown of these bytes.
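                // For reference, that layout as a struct (a sketch mirroring the spec's
                // table; illustrative only, not used by this code):
                //
                //   struct PipelineCacheHeaderVersionOne {
                //       uint32_t headerSize;    // cacheHeader[0] == 16 + VK_UUID_SIZE
                //       uint32_t headerVersion; // cacheHeader[1]
                //       uint32_t vendorID;      // cacheHeader[2]
                //       uint32_t deviceID;      // cacheHeader[3]
                //       uint8_t  pipelineCacheUUID[VK_UUID_SIZE]; // starts at &cacheHeader[4]
                //   };
                //
                // A cached blob is only usable on the exact device and driver build that
                // produced it, hence the vendorID/deviceID/UUID checks below.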
                SkASSERT(cacheHeader[0] == 16 + VK_UUID_SIZE);
                const VkPhysicalDeviceProperties& devProps = fGpu->physicalDeviceProperties();
                const uint8_t* supportedPipelineCacheUUID = devProps.pipelineCacheUUID;
                if (cacheHeader[2] == devProps.vendorID && cacheHeader[3] == devProps.deviceID &&
                    !memcmp(&cacheHeader[4], supportedPipelineCacheUUID, VK_UUID_SIZE)) {
                    createInfo.initialDataSize = cached->size();
                    createInfo.pInitialData = cached->data();
                    usedCached = true;
                }
            }
        }
        if (!usedCached) {
            createInfo.initialDataSize = 0;
            createInfo.pInitialData = nullptr;
        }
        VkResult result = GR_VK_CALL(fGpu->vkInterface(),
                                     CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                         &fPipelineCache));
        SkASSERT(VK_SUCCESS == result);
        if (VK_SUCCESS != result) {
            fPipelineCache = VK_NULL_HANDLE;
        }
    }
    return fPipelineCache;
}

void GrVkResourceProvider::init() {
    // Init uniform descriptor objects
    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

GrVkPipeline* GrVkResourceProvider::createPipeline(const GrPrimitiveProcessor& primProc,
                                                   const GrPipeline& pipeline,
                                                   const GrStencilSettings& stencil,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   VkRenderPass compatibleRenderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, primProc, pipeline, stencil, shaderStageInfo,
                                shaderStageCount, primitiveType, compatibleRenderPass, layout,
                                this->pipelineCache());
}

GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
        const GrVkRenderTarget* dst,
        VkPipelineShaderStageCreateInfo* shaderStageInfo,
        VkPipelineLayout pipelineLayout) {
    // Find or create a compatible pipeline.
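    // Note: this is a linear scan, which should be fine on the assumption that the
    // number of copy pipelines stays small (at most one per distinct render-pass
    // compatibility class seen so far).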
    GrVkCopyPipeline* pipeline = nullptr;
    for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
        if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
            pipeline = fCopyPipelines[i];
        }
    }
    if (!pipeline) {
        pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
                                            pipelineLayout,
                                            dst->numColorSamples(),
                                            *dst->simpleRenderPass(),
                                            this->pipelineCache());
        if (!pipeline) {
            return nullptr;
        }
        fCopyPipelines.push_back(pipeline);
    }
    SkASSERT(pipeline);
    pipeline->ref();
    return pipeline;
}

// To create framebuffers, we first need to create a simple RenderPass that is
// used only for framebuffer creation. When we actually render, we will create
// RenderPasses as needed that are compatible with the framebuffer.
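// A typical lookup, as a sketch (caller-side names are hypothetical). The find*
// helpers below return a reffed object, so the caller must balance with unref():
//
//   GrVkResourceProvider::CompatibleRPHandle handle;
//   const GrVkRenderPass* rp = resourceProvider.findCompatibleRenderPass(*target, &handle);
//   // ... use rp, or look up load/store variants via the handle ...
//   rp->unref(gpu);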
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    const GrVkRenderPass* renderPass =
            fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
    renderPass->ref();

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    renderPass->ref();
    return renderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findCompatibleExternalRenderPass(
        VkRenderPass renderPass, uint32_t colorAttachmentIndex) {
    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        if (fExternalRenderPasses[i]->isCompatibleExternalRP(renderPass)) {
            fExternalRenderPasses[i]->ref();
#ifdef SK_DEBUG
            uint32_t cachedColorIndex;
            SkASSERT(fExternalRenderPasses[i]->colorAttachmentIndex(&cachedColorIndex));
            SkASSERT(cachedColorIndex == colorAttachmentIndex);
#endif
            return fExternalRenderPasses[i];
        }
    }

    const GrVkRenderPass* newRenderPass = new GrVkRenderPass(renderPass, colorAttachmentIndex);
    fExternalRenderPasses.push_back(newRenderPass);
    newRenderPass->ref();
    return newRenderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
        const GrVkRenderTarget& target,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps,
        CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target.compatibleRenderPassHandle();

    // This will get us the handle to (and possibly create) the compatible set for the
    // specific GrVkRenderPass we are looking for.
    this->findCompatibleRenderPass(target, compatibleHandle);
    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   stencilOps);
    renderPass->ref();
    return renderPass;
}

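// Note: despite the findOrCreate name, this currently always allocates a fresh
// pool; recycling happens one level up, where the GrVkDescriptorSetManagers
// hand back individual descriptor sets (see recycleDescriptorSet below).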
GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
        VkDescriptorType type, uint32_t count) {
    return new GrVkDescriptorPool(fGpu, type, count);
}

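// Samplers are cached in a hash keyed on both the sampler state and the Ycbcr
// conversion info, so samplers for external/YCbCr formats get their own entries
// rather than aliasing the ordinary ones.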
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
        const GrSamplerState& params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
        if (!sampler) {
            return nullptr;
        }
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
        const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSamplerYcbcrConversion* ycbcrConversion =
            fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
    if (!ycbcrConversion) {
        ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
        fYcbcrConversions.add(ycbcrConversion);
    }
    SkASSERT(ycbcrConversion);
    ycbcrConversion->ref();
    return ycbcrConversion;
}

GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        const GrPipeline& pipeline, const GrPrimitiveProcessor& proc,
        const GrTextureProxy* const primProcProxies[], GrPrimitiveType primitiveType,
        VkRenderPass compatibleRenderPass) {
    return fPipelineStateCache->refPipelineState(proc, primProcProxies, pipeline, primitiveType,
                                                 compatibleRenderPass);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   uniformHandler);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   visibilities);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
                                                                                fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
}

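// fAvailableCommandPools is also modified by reset(), which can run on a
// task-group thread (see backgroundReset below), so it is only touched while
// holding fBackgroundMutex.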
GrVkCommandPool* GrVkResourceProvider::findOrCreateCommandPool() {
    std::unique_lock<std::recursive_mutex> lock(fBackgroundMutex);
    GrVkCommandPool* result;
    if (fAvailableCommandPools.count()) {
        result = fAvailableCommandPools.back();
        fAvailableCommandPools.pop_back();
    } else {
        result = GrVkCommandPool::Create(fGpu);
    }
    SkASSERT(result->unique());
    SkDEBUGCODE(
        for (const GrVkCommandPool* pool : fActiveCommandPools) {
            SkASSERT(pool != result);
        }
        for (const GrVkCommandPool* pool : fAvailableCommandPools) {
            SkASSERT(pool != result);
        }
    )
    fActiveCommandPools.push_back(result);
    result->ref();
    return result;
}

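// Scans the active pools and retires any whose primary command buffer has
// finished on the GPU; retired pools are reset (possibly on a background
// thread) and returned to the available list. removeShuffle is fine here
// because the order of fActiveCommandPools is not meaningful.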
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandPools.count() - 1; i >= 0; --i) {
        GrVkCommandPool* pool = fActiveCommandPools[i];
        if (!pool->isOpen()) {
            GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
            if (buffer->finished(fGpu)) {
                fActiveCommandPools.removeShuffle(i);
                this->backgroundReset(pool);
            }
        }
    }
}

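// A simple free list for the GrVkResources backing standard-size uniform
// buffers: recycled resources are parked in fAvailableUniformBufferResources
// and handed back out here instead of being destroyed and re-created.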
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

void GrVkResourceProvider::destroyResources(bool deviceLost) {
    SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // Release all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unref(fGpu);
    }

    // Loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        fExternalRenderPasses[i]->unref(fGpu);
    }
    fExternalRenderPasses.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    for (GrVkCommandPool* pool : fActiveCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fActiveCommandPools.reset();

    for (GrVkCommandPool* pool : fAvailableCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fAvailableCommandPools.reset();

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // Release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

void GrVkResourceProvider::abandonResources() {
    SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // Abandon all command pools
    for (int i = 0; i < fActiveCommandPools.count(); ++i) {
        SkASSERT(fActiveCommandPools[i]->unique());
        fActiveCommandPools[i]->unrefAndAbandon();
    }
    fActiveCommandPools.reset();
    for (int i = 0; i < fAvailableCommandPools.count(); ++i) {
        SkASSERT(fAvailableCommandPools[i]->unique());
        fAvailableCommandPools[i]->unrefAndAbandon();
    }
    fAvailableCommandPools.reset();

    // Abandon all copy pipelines
    for (int i = 0; i < fCopyPipelines.count(); ++i) {
        fCopyPipelines[i]->unrefAndAbandon();
    }

    // Loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        fExternalRenderPasses[i]->unrefAndAbandon();
    }
    fExternalRenderPasses.reset();

    // Iterate through all stored GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    fPipelineStateCache->abandon();

    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->abandon();
    }
    fDescriptorSetManagers.reset();

    // Release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}

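// Handed finished pools by checkCommandBuffers(). The resources are released
// immediately, but the relatively expensive command pool reset is pushed onto
// a task-group thread when one is available; reset() then returns the pool to
// the available list under fBackgroundMutex.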
void GrVkResourceProvider::backgroundReset(GrVkCommandPool* pool) {
    SkASSERT(pool->unique());
    pool->releaseResources(fGpu);
    SkTaskGroup* taskGroup = fGpu->getContext()->contextPriv().getTaskGroup();
    if (taskGroup) {
        taskGroup->add([this, pool]() {
            this->reset(pool);
        });
    } else {
        this->reset(pool);
    }
}

void GrVkResourceProvider::reset(GrVkCommandPool* pool) {
    SkASSERT(pool->unique());
    pool->reset(fGpu);
    std::unique_lock<std::recursive_mutex> providerLock(fBackgroundMutex);
    fAvailableCommandPools.push_back(pool);
}

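// This uses the standard Vulkan two-call pattern: the first GetPipelineCacheData
// call (with a null data pointer) queries the required size, the second fills
// the buffer. The blob is then stored in the persistent cache under the same
// key that pipelineCache() uses to load it back.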
void GrVkResourceProvider::storePipelineCacheData() {
    size_t dataSize = 0;
    VkResult result = GR_VK_CALL(fGpu->vkInterface(), GetPipelineCacheData(fGpu->device(),
                                                                           this->pipelineCache(),
                                                                           &dataSize, nullptr));
    SkASSERT(result == VK_SUCCESS);

    std::unique_ptr<uint8_t[]> data(new uint8_t[dataSize]);

    result = GR_VK_CALL(fGpu->vkInterface(), GetPipelineCacheData(fGpu->device(),
                                                                  this->pipelineCache(),
                                                                  &dataSize,
                                                                  (void*)data.get()));
    SkASSERT(result == VK_SUCCESS);

    uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
    sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));

    fGpu->getContext()->contextPriv().getPersistentCache()->store(
            *keyData, *SkData::MakeWithoutCopy(data.get(), dataSize));
}

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
        const GrVkGpu* gpu,
        const GrVkRenderTarget& target)
        : fLastReturnedIndex(0) {
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
        const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store
    // render pass on creation.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

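// Starting the scan at fLastReturnedIndex is a small locality optimization:
// the assumption is that consecutive lookups tend to ask for the same
// load/store combination, so the common case hits on the first probe.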
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
        const GrVkGpu* gpu,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
    renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}

void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unrefAndAbandon();
            fRenderPasses[i] = nullptr;
        }
    }
}