/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrVkResourceProvider.h"
9
Ethan Nicholas8e265a72018-12-12 16:22:40 -050010#include "GrContextPriv.h"
Brian Salomon2bbdcc42017-09-07 12:36:34 -040011#include "GrSamplerState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050012#include "GrVkCommandBuffer.h"
Ethan Nicholas8e265a72018-12-12 16:22:40 -050013#include "GrVkCommandPool.h"
egdanielbc9b2962016-09-27 08:00:53 -070014#include "GrVkCopyPipeline.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000015#include "GrVkGpu.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050016#include "GrVkPipeline.h"
egdaniel066df7c2016-06-08 14:02:27 -070017#include "GrVkRenderTarget.h"
jvanverth4c6e47a2016-07-22 10:34:52 -070018#include "GrVkUniformBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050019#include "GrVkUtil.h"
Ethan Nicholas8e265a72018-12-12 16:22:40 -050020#include "SkTaskGroup.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050021
#ifdef SK_TRACE_VK_RESOURCES
// Monotonically-increasing key source used to tag GrVkResources for leak tracing.
std::atomic<uint32_t> GrVkResource::fKeyCounter{0};
#endif
25
egdaniel778555c2016-05-02 06:50:36 -070026GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
27 : fGpu(gpu)
egdaniel707bbd62016-07-26 07:19:47 -070028 , fPipelineCache(VK_NULL_HANDLE) {
egdaniel22281c12016-03-23 13:49:40 -070029 fPipelineStateCache = new PipelineStateCache(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050030}
31
32GrVkResourceProvider::~GrVkResourceProvider() {
egdanield62e28b2016-06-07 08:43:30 -070033 SkASSERT(0 == fRenderPassArray.count());
Greg Danielb46add82019-01-02 14:51:29 -050034 SkASSERT(0 == fExternalRenderPasses.count());
jvanverth03509ea2016-03-02 13:19:47 -080035 SkASSERT(VK_NULL_HANDLE == fPipelineCache);
egdaniel22281c12016-03-23 13:49:40 -070036 delete fPipelineStateCache;
jvanverth03509ea2016-03-02 13:19:47 -080037}
38
Greg Daniela870b462019-01-08 15:49:46 -050039VkPipelineCache GrVkResourceProvider::pipelineCache() {
40 if (fPipelineCache == VK_NULL_HANDLE) {
41 VkPipelineCacheCreateInfo createInfo;
42 memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
43 createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
44 createInfo.pNext = nullptr;
45 createInfo.flags = 0;
egdaniel778555c2016-05-02 06:50:36 -070046
Robert Phillips9da87e02019-02-04 13:26:26 -050047 auto persistentCache = fGpu->getContext()->priv().getPersistentCache();
Greg Daniela870b462019-01-08 15:49:46 -050048 sk_sp<SkData> cached;
49 if (persistentCache) {
50 uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
51 sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));
52 cached = persistentCache->load(*keyData);
53 }
54 bool usedCached = false;
55 if (cached) {
56 uint32_t* cacheHeader = (uint32_t*)cached->data();
57 if (cacheHeader[1] == VK_PIPELINE_CACHE_HEADER_VERSION_ONE) {
58 // For version one of the header, the total header size is 16 bytes plus
59 // VK_UUID_SIZE bytes. See Section 9.6 (Pipeline Cache) in the vulkan spec to see
60 // the breakdown of these bytes.
61 SkASSERT(cacheHeader[0] == 16 + VK_UUID_SIZE);
62 const VkPhysicalDeviceProperties& devProps = fGpu->physicalDeviceProperties();
63 const uint8_t* supportedPipelineCacheUUID = devProps.pipelineCacheUUID;
64 if (cacheHeader[2] == devProps.vendorID && cacheHeader[3] == devProps.deviceID &&
65 !memcmp(&cacheHeader[4], supportedPipelineCacheUUID, VK_UUID_SIZE)) {
66 createInfo.initialDataSize = cached->size();
67 createInfo.pInitialData = cached->data();
68 usedCached = true;
69 }
70 }
71 }
72 if (!usedCached) {
73 createInfo.initialDataSize = 0;
74 createInfo.pInitialData = nullptr;
75 }
76 VkResult result = GR_VK_CALL(fGpu->vkInterface(),
77 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
78 &fPipelineCache));
79 SkASSERT(VK_SUCCESS == result);
80 if (VK_SUCCESS != result) {
81 fPipelineCache = VK_NULL_HANDLE;
82 }
83 }
84 return fPipelineCache;
85}
86
87void GrVkResourceProvider::init() {
egdaniel707bbd62016-07-26 07:19:47 -070088 // Init uniform descriptor objects
Greg Daniel18f96022017-05-04 15:09:03 -040089 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
90 fDescriptorSetManagers.emplace_back(dsm);
egdaniel707bbd62016-07-26 07:19:47 -070091 SkASSERT(1 == fDescriptorSetManagers.count());
92 fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
Greg Daniel164a9f02016-02-22 09:56:40 -050093}
94
Robert Phillipsd0fe8752019-01-31 14:13:59 -050095GrVkPipeline* GrVkResourceProvider::createPipeline(int numColorSamples,
96 const GrPrimitiveProcessor& primProc,
Brian Salomonff168d92018-06-23 15:17:27 -040097 const GrPipeline& pipeline,
csmartdaltonc633abb2016-11-01 08:55:55 -070098 const GrStencilSettings& stencil,
Greg Daniel164a9f02016-02-22 09:56:40 -050099 VkPipelineShaderStageCreateInfo* shaderStageInfo,
100 int shaderStageCount,
101 GrPrimitiveType primitiveType,
Greg Daniel99b88e02018-10-03 15:31:20 -0400102 VkRenderPass compatibleRenderPass,
Greg Daniel164a9f02016-02-22 09:56:40 -0500103 VkPipelineLayout layout) {
Robert Phillipsd0fe8752019-01-31 14:13:59 -0500104 return GrVkPipeline::Create(fGpu, numColorSamples, primProc, pipeline, stencil, shaderStageInfo,
Greg Daniel99b88e02018-10-03 15:31:20 -0400105 shaderStageCount, primitiveType, compatibleRenderPass, layout,
Greg Daniela870b462019-01-08 15:49:46 -0500106 this->pipelineCache());
Greg Daniel164a9f02016-02-22 09:56:40 -0500107}
108
egdanielbc9b2962016-09-27 08:00:53 -0700109GrVkCopyPipeline* GrVkResourceProvider::findOrCreateCopyPipeline(
110 const GrVkRenderTarget* dst,
111 VkPipelineShaderStageCreateInfo* shaderStageInfo,
112 VkPipelineLayout pipelineLayout) {
113 // Find or Create a compatible pipeline
114 GrVkCopyPipeline* pipeline = nullptr;
115 for (int i = 0; i < fCopyPipelines.count() && !pipeline; ++i) {
116 if (fCopyPipelines[i]->isCompatible(*dst->simpleRenderPass())) {
117 pipeline = fCopyPipelines[i];
118 }
119 }
120 if (!pipeline) {
121 pipeline = GrVkCopyPipeline::Create(fGpu, shaderStageInfo,
122 pipelineLayout,
123 dst->numColorSamples(),
124 *dst->simpleRenderPass(),
Greg Daniela870b462019-01-08 15:49:46 -0500125 this->pipelineCache());
Greg Danielf3a4ef92018-03-01 11:34:59 -0500126 if (!pipeline) {
127 return nullptr;
128 }
egdanielbc9b2962016-09-27 08:00:53 -0700129 fCopyPipelines.push_back(pipeline);
130 }
131 SkASSERT(pipeline);
132 pipeline->ref();
133 return pipeline;
134}
Greg Daniel164a9f02016-02-22 09:56:40 -0500135
136// To create framebuffers, we first need to create a simple RenderPass that is
halcanary9d524f22016-03-29 09:03:52 -0700137// only used for framebuffer creation. When we actually render we will create
Greg Daniel164a9f02016-02-22 09:56:40 -0500138// RenderPasses as needed that are compatible with the framebuffer.
halcanary9d524f22016-03-29 09:03:52 -0700139const GrVkRenderPass*
egdanield62e28b2016-06-07 08:43:30 -0700140GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
141 CompatibleRPHandle* compatibleHandle) {
142 for (int i = 0; i < fRenderPassArray.count(); ++i) {
143 if (fRenderPassArray[i].isCompatible(target)) {
144 const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500145 renderPass->ref();
egdanield62e28b2016-06-07 08:43:30 -0700146 if (compatibleHandle) {
147 *compatibleHandle = CompatibleRPHandle(i);
148 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500149 return renderPass;
150 }
151 }
152
egdanield62e28b2016-06-07 08:43:30 -0700153 const GrVkRenderPass* renderPass =
154 fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
155 renderPass->ref();
156
157 if (compatibleHandle) {
158 *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
159 }
160 return renderPass;
161}
162
163const GrVkRenderPass*
164GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
165 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
166 int index = compatibleHandle.toIndex();
167 const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500168 renderPass->ref();
169 return renderPass;
170}
171
Greg Danielb46add82019-01-02 14:51:29 -0500172const GrVkRenderPass* GrVkResourceProvider::findCompatibleExternalRenderPass(
173 VkRenderPass renderPass, uint32_t colorAttachmentIndex) {
174 for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
175 if (fExternalRenderPasses[i]->isCompatibleExternalRP(renderPass)) {
176 fExternalRenderPasses[i]->ref();
177#ifdef SK_DEBUG
178 uint32_t cachedColorIndex;
179 SkASSERT(fExternalRenderPasses[i]->colorAttachmentIndex(&cachedColorIndex));
180 SkASSERT(cachedColorIndex == colorAttachmentIndex);
181#endif
182 return fExternalRenderPasses[i];
183 }
184 }
185
186 const GrVkRenderPass* newRenderPass = new GrVkRenderPass(renderPass, colorAttachmentIndex);
187 fExternalRenderPasses.push_back(newRenderPass);
188 newRenderPass->ref();
189 return newRenderPass;
190}
191
egdaniel2feb0932016-06-08 06:48:09 -0700192const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
193 const GrVkRenderTarget& target,
194 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700195 const GrVkRenderPass::LoadStoreOps& stencilOps,
196 CompatibleRPHandle* compatibleHandle) {
egdaniel066df7c2016-06-08 14:02:27 -0700197 GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
198 GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
199 : &tempRPHandle;
200 *pRPHandle = target.compatibleRenderPassHandle();
201
egdaniel2feb0932016-06-08 06:48:09 -0700202 // This will get us the handle to (and possible create) the compatible set for the specific
203 // GrVkRenderPass we are looking for.
204 this->findCompatibleRenderPass(target, compatibleHandle);
Greg Danield3682112016-10-03 15:06:07 -0400205 return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
egdaniel2feb0932016-06-08 06:48:09 -0700206}
207
208const GrVkRenderPass*
209GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
210 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700211 const GrVkRenderPass::LoadStoreOps& stencilOps) {
212 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
213 CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
214 const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
215 colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700216 stencilOps);
217 renderPass->ref();
218 return renderPass;
219}
220
Greg Daniel164a9f02016-02-22 09:56:40 -0500221GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
egdanielc2dc1b22016-03-18 13:18:23 -0700222 VkDescriptorType type, uint32_t count) {
223 return new GrVkDescriptorPool(fGpu, type, count);
Greg Daniel164a9f02016-02-22 09:56:40 -0500224}
225
Greg Daniel7e000222018-12-03 10:08:21 -0500226GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
227 const GrSamplerState& params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
228 GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
egdaniel8b6394c2016-03-04 07:35:10 -0800229 if (!sampler) {
Greg Daniel7e000222018-12-03 10:08:21 -0500230 sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
231 if (!sampler) {
232 return nullptr;
233 }
egdaniel8b6394c2016-03-04 07:35:10 -0800234 fSamplers.add(sampler);
235 }
236 SkASSERT(sampler);
237 sampler->ref();
238 return sampler;
239}
240
Greg Daniel7e000222018-12-03 10:08:21 -0500241GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
242 const GrVkYcbcrConversionInfo& ycbcrInfo) {
243 GrVkSamplerYcbcrConversion* ycbcrConversion =
244 fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
245 if (!ycbcrConversion) {
246 ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
247 if (!ycbcrConversion) {
248 return nullptr;
249 }
250 fYcbcrConversions.add(ycbcrConversion);
251 }
252 SkASSERT(ycbcrConversion);
253 ycbcrConversion->ref();
254 return ycbcrConversion;
255}
256
Greg Daniel09eeefb2017-10-16 15:15:02 -0400257GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
Robert Phillipsd0fe8752019-01-31 14:13:59 -0500258 GrRenderTarget* renderTarget, GrSurfaceOrigin origin,
Greg Daniel9a51a862018-11-30 10:18:14 -0500259 const GrPipeline& pipeline, const GrPrimitiveProcessor& proc,
260 const GrTextureProxy* const primProcProxies[], GrPrimitiveType primitiveType,
Greg Daniel99b88e02018-10-03 15:31:20 -0400261 VkRenderPass compatibleRenderPass) {
Robert Phillipsd0fe8752019-01-31 14:13:59 -0500262 return fPipelineStateCache->refPipelineState(renderTarget, origin, proc, primProcProxies,
263 pipeline, primitiveType, compatibleRenderPass);
egdaniel22281c12016-03-23 13:49:40 -0700264}
265
Greg Daniela7543782017-05-02 14:01:43 -0400266void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
267 const GrVkUniformHandler& uniformHandler,
egdaniel707bbd62016-07-26 07:19:47 -0700268 GrVkDescriptorSetManager::Handle* handle) {
egdaniela95220d2016-07-21 11:50:37 -0700269 SkASSERT(handle);
Greg Daniela7543782017-05-02 14:01:43 -0400270 SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
271 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
egdaniela95220d2016-07-21 11:50:37 -0700272 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400273 if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
egdaniela95220d2016-07-21 11:50:37 -0700274 *handle = GrVkDescriptorSetManager::Handle(i);
275 return;
276 }
277 }
278
Greg Daniel18f96022017-05-04 15:09:03 -0400279 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
280 uniformHandler);
281 fDescriptorSetManagers.emplace_back(dsm);
egdaniela95220d2016-07-21 11:50:37 -0700282 *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
283}
284
Greg Daniela7543782017-05-02 14:01:43 -0400285void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
286 const SkTArray<uint32_t>& visibilities,
egdaniel4d866df2016-08-25 13:52:00 -0700287 GrVkDescriptorSetManager::Handle* handle) {
288 SkASSERT(handle);
Greg Daniela7543782017-05-02 14:01:43 -0400289 SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
290 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
egdaniel4d866df2016-08-25 13:52:00 -0700291 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400292 if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
egdaniel4d866df2016-08-25 13:52:00 -0700293 *handle = GrVkDescriptorSetManager::Handle(i);
294 return;
295 }
296 }
297
Greg Daniel18f96022017-05-04 15:09:03 -0400298 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
299 visibilities);
300 fDescriptorSetManagers.emplace_back(dsm);
egdaniel4d866df2016-08-25 13:52:00 -0700301 *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
302}
303
egdaniel707bbd62016-07-26 07:19:47 -0700304VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
305 SkASSERT(fUniformDSHandle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400306 return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
egdaniel707bbd62016-07-26 07:19:47 -0700307}
308
309VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
310 const GrVkDescriptorSetManager::Handle& handle) const {
311 SkASSERT(handle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400312 return fDescriptorSetManagers[handle.toIndex()]->layout();
egdaniel707bbd62016-07-26 07:19:47 -0700313}
314
egdaniela95220d2016-07-21 11:50:37 -0700315const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
316 SkASSERT(fUniformDSHandle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400317 return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
318 fUniformDSHandle);
egdaniela95220d2016-07-21 11:50:37 -0700319}
320
egdaniel707bbd62016-07-26 07:19:47 -0700321const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
322 const GrVkDescriptorSetManager::Handle& handle) {
323 SkASSERT(handle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400324 return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
egdaniel707bbd62016-07-26 07:19:47 -0700325}
egdaniela95220d2016-07-21 11:50:37 -0700326
327void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
328 const GrVkDescriptorSetManager::Handle& handle) {
329 SkASSERT(descSet);
330 SkASSERT(handle.isValid());
331 int managerIdx = handle.toIndex();
332 SkASSERT(managerIdx < fDescriptorSetManagers.count());
Greg Daniel18f96022017-05-04 15:09:03 -0400333 fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
egdaniel778555c2016-05-02 06:50:36 -0700334}
335
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500336GrVkCommandPool* GrVkResourceProvider::findOrCreateCommandPool() {
337 std::unique_lock<std::recursive_mutex> lock(fBackgroundMutex);
338 GrVkCommandPool* result;
339 if (fAvailableCommandPools.count()) {
340 result = fAvailableCommandPools.back();
341 fAvailableCommandPools.pop_back();
jvanverth7ec92412016-07-06 09:24:57 -0700342 } else {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500343 result = GrVkCommandPool::Create(fGpu);
jvanverth7ec92412016-07-06 09:24:57 -0700344 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500345 SkASSERT(result->unique());
346 SkDEBUGCODE(
347 for (const GrVkCommandPool* pool : fActiveCommandPools) {
348 SkASSERT(pool != result);
349 }
350 for (const GrVkCommandPool* pool : fAvailableCommandPools) {
351 SkASSERT(pool != result);
352 }
Ben Wagner1c0cacf2019-01-14 12:57:36 -0500353 )
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500354 fActiveCommandPools.push_back(result);
355 result->ref();
356 return result;
Greg Daniel164a9f02016-02-22 09:56:40 -0500357}
358
359void GrVkResourceProvider::checkCommandBuffers() {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500360 for (int i = fActiveCommandPools.count() - 1; i >= 0; --i) {
361 GrVkCommandPool* pool = fActiveCommandPools[i];
362 if (!pool->isOpen()) {
363 GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
364 if (buffer->finished(fGpu)) {
365 fActiveCommandPools.removeShuffle(i);
366 this->backgroundReset(pool);
367 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500368 }
369 }
370}
371
jvanverth4c6e47a2016-07-22 10:34:52 -0700372const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
373 const GrVkResource* resource = nullptr;
374 int count = fAvailableUniformBufferResources.count();
375 if (count > 0) {
376 resource = fAvailableUniformBufferResources[count - 1];
377 fAvailableUniformBufferResources.removeShuffle(count - 1);
378 } else {
379 resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
380 }
381 return resource;
382}
383
384void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
385 fAvailableUniformBufferResources.push_back(resource);
386}
387
Jim Van Verth09557d72016-11-07 11:10:21 -0500388void GrVkResourceProvider::destroyResources(bool deviceLost) {
Robert Phillips9da87e02019-02-04 13:26:26 -0500389 SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500390 if (taskGroup) {
391 taskGroup->wait();
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000392 }
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000393
egdanielbc9b2962016-09-27 08:00:53 -0700394 // Release all copy pipelines
395 for (int i = 0; i < fCopyPipelines.count(); ++i) {
396 fCopyPipelines[i]->unref(fGpu);
397 }
398
egdanield62e28b2016-06-07 08:43:30 -0700399 // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
400 for (int i = 0; i < fRenderPassArray.count(); ++i) {
401 fRenderPassArray[i].releaseResources(fGpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500402 }
egdanield62e28b2016-06-07 08:43:30 -0700403 fRenderPassArray.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500404
Greg Danielb46add82019-01-02 14:51:29 -0500405 for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
406 fExternalRenderPasses[i]->unref(fGpu);
407 }
408 fExternalRenderPasses.reset();
409
egdaniel8b6394c2016-03-04 07:35:10 -0800410 // Iterate through all store GrVkSamplers and unref them before resetting the hash.
Greg Daniel7e000222018-12-03 10:08:21 -0500411 SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
egdaniel8b6394c2016-03-04 07:35:10 -0800412 for (; !iter.done(); ++iter) {
413 (*iter).unref(fGpu);
414 }
415 fSamplers.reset();
416
egdaniel22281c12016-03-23 13:49:40 -0700417 fPipelineStateCache->release();
418
jvanverth03509ea2016-03-02 13:19:47 -0800419 GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
420 fPipelineCache = VK_NULL_HANDLE;
egdaniel778555c2016-05-02 06:50:36 -0700421
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500422 for (GrVkCommandPool* pool : fActiveCommandPools) {
423 SkASSERT(pool->unique());
424 pool->unref(fGpu);
425 }
426 fActiveCommandPools.reset();
427
428 for (GrVkCommandPool* pool : fAvailableCommandPools) {
429 SkASSERT(pool->unique());
430 pool->unref(fGpu);
431 }
432 fAvailableCommandPools.reset();
433
egdaniela95220d2016-07-21 11:50:37 -0700434 // We must release/destroy all command buffers and pipeline states before releasing the
435 // GrVkDescriptorSetManagers
436 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400437 fDescriptorSetManagers[i]->release(fGpu);
egdaniela95220d2016-07-21 11:50:37 -0700438 }
439 fDescriptorSetManagers.reset();
jvanverth4c6e47a2016-07-22 10:34:52 -0700440
441 // release our uniform buffers
442 for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
443 SkASSERT(fAvailableUniformBufferResources[i]->unique());
444 fAvailableUniformBufferResources[i]->unref(fGpu);
445 }
446 fAvailableUniformBufferResources.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500447}
448
449void GrVkResourceProvider::abandonResources() {
Robert Phillips9da87e02019-02-04 13:26:26 -0500450 SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500451 if (taskGroup) {
452 taskGroup->wait();
Greg Daniel164a9f02016-02-22 09:56:40 -0500453 }
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000454
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500455 // Abandon all command pools
456 for (int i = 0; i < fActiveCommandPools.count(); ++i) {
457 SkASSERT(fActiveCommandPools[i]->unique());
458 fActiveCommandPools[i]->unrefAndAbandon();
Ethan Nicholasbff4e072018-12-12 18:17:24 +0000459 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500460 fActiveCommandPools.reset();
461 for (int i = 0; i < fAvailableCommandPools.count(); ++i) {
462 SkASSERT(fAvailableCommandPools[i]->unique());
463 fAvailableCommandPools[i]->unrefAndAbandon();
464 }
465 fAvailableCommandPools.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500466
egdanielbc9b2962016-09-27 08:00:53 -0700467 // Abandon all copy pipelines
468 for (int i = 0; i < fCopyPipelines.count(); ++i) {
469 fCopyPipelines[i]->unrefAndAbandon();
470 }
471
egdanield62e28b2016-06-07 08:43:30 -0700472 // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
473 for (int i = 0; i < fRenderPassArray.count(); ++i) {
474 fRenderPassArray[i].abandonResources();
Greg Daniel164a9f02016-02-22 09:56:40 -0500475 }
egdanield62e28b2016-06-07 08:43:30 -0700476 fRenderPassArray.reset();
Greg Daniel164a9f02016-02-22 09:56:40 -0500477
Greg Danielb46add82019-01-02 14:51:29 -0500478 for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
479 fExternalRenderPasses[i]->unrefAndAbandon();
480 }
481 fExternalRenderPasses.reset();
482
egdaniel8b6394c2016-03-04 07:35:10 -0800483 // Iterate through all store GrVkSamplers and unrefAndAbandon them before resetting the hash.
Greg Daniel7e000222018-12-03 10:08:21 -0500484 SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
egdaniel8b6394c2016-03-04 07:35:10 -0800485 for (; !iter.done(); ++iter) {
486 (*iter).unrefAndAbandon();
487 }
488 fSamplers.reset();
489
egdaniel22281c12016-03-23 13:49:40 -0700490 fPipelineStateCache->abandon();
491
jvanverth03509ea2016-03-02 13:19:47 -0800492 fPipelineCache = VK_NULL_HANDLE;
egdaniel778555c2016-05-02 06:50:36 -0700493
egdaniela95220d2016-07-21 11:50:37 -0700494 // We must abandon all command buffers and pipeline states before abandoning the
495 // GrVkDescriptorSetManagers
496 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400497 fDescriptorSetManagers[i]->abandon();
egdaniela95220d2016-07-21 11:50:37 -0700498 }
499 fDescriptorSetManagers.reset();
500
jvanverth4c6e47a2016-07-22 10:34:52 -0700501 // release our uniform buffers
502 for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
503 SkASSERT(fAvailableUniformBufferResources[i]->unique());
504 fAvailableUniformBufferResources[i]->unrefAndAbandon();
505 }
506 fAvailableUniformBufferResources.reset();
jvanverth03509ea2016-03-02 13:19:47 -0800507}
egdanield62e28b2016-06-07 08:43:30 -0700508
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500509void GrVkResourceProvider::backgroundReset(GrVkCommandPool* pool) {
510 SkASSERT(pool->unique());
511 pool->releaseResources(fGpu);
Robert Phillips9da87e02019-02-04 13:26:26 -0500512 SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500513 if (taskGroup) {
514 taskGroup->add([this, pool]() {
515 this->reset(pool);
516 });
517 } else {
518 this->reset(pool);
519 }
520}
521
522void GrVkResourceProvider::reset(GrVkCommandPool* pool) {
523 SkASSERT(pool->unique());
524 pool->reset(fGpu);
525 std::unique_lock<std::recursive_mutex> providerLock(fBackgroundMutex);
526 fAvailableCommandPools.push_back(pool);
527}
528
Greg Daniela870b462019-01-08 15:49:46 -0500529void GrVkResourceProvider::storePipelineCacheData() {
530 size_t dataSize = 0;
531 VkResult result = GR_VK_CALL(fGpu->vkInterface(), GetPipelineCacheData(fGpu->device(),
532 this->pipelineCache(),
533 &dataSize, nullptr));
534 SkASSERT(result == VK_SUCCESS);
535
536 std::unique_ptr<uint8_t[]> data(new uint8_t[dataSize]);
537
538 result = GR_VK_CALL(fGpu->vkInterface(), GetPipelineCacheData(fGpu->device(),
539 this->pipelineCache(),
540 &dataSize,
541 (void*)data.get()));
542 SkASSERT(result == VK_SUCCESS);
543
544 uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
545 sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));
546
Robert Phillips9da87e02019-02-04 13:26:26 -0500547 fGpu->getContext()->priv().getPersistentCache()->store(
Greg Daniela870b462019-01-08 15:49:46 -0500548 *keyData, *SkData::MakeWithoutCopy(data.get(), dataSize));
549}
550
egdanield62e28b2016-06-07 08:43:30 -0700551////////////////////////////////////////////////////////////////////////////////
552
553GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
554 const GrVkGpu* gpu,
555 const GrVkRenderTarget& target)
556 : fLastReturnedIndex(0) {
557 fRenderPasses.emplace_back(new GrVkRenderPass());
558 fRenderPasses[0]->initSimple(gpu, target);
559}
560
561bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
562 const GrVkRenderTarget& target) const {
563 // The first GrVkRenderpass should always exists since we create the basic load store
564 // render pass on create
565 SkASSERT(fRenderPasses[0]);
566 return fRenderPasses[0]->isCompatible(target);
567}
568
egdaniel2feb0932016-06-08 06:48:09 -0700569GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
570 const GrVkGpu* gpu,
571 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700572 const GrVkRenderPass::LoadStoreOps& stencilOps) {
573 for (int i = 0; i < fRenderPasses.count(); ++i) {
574 int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
egdanielce3bfb12016-08-26 11:05:13 -0700575 if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
egdaniel2feb0932016-06-08 06:48:09 -0700576 fLastReturnedIndex = idx;
577 return fRenderPasses[idx];
578 }
579 }
egdaniel9cb63402016-06-23 08:37:05 -0700580 GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
egdanielce3bfb12016-08-26 11:05:13 -0700581 renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
egdaniel2feb0932016-06-08 06:48:09 -0700582 fLastReturnedIndex = fRenderPasses.count() - 1;
583 return renderPass;
584}
585
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500586void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(GrVkGpu* gpu) {
egdanield62e28b2016-06-07 08:43:30 -0700587 for (int i = 0; i < fRenderPasses.count(); ++i) {
588 if (fRenderPasses[i]) {
589 fRenderPasses[i]->unref(gpu);
590 fRenderPasses[i] = nullptr;
591 }
592 }
593}
594
595void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
596 for (int i = 0; i < fRenderPasses.count(); ++i) {
597 if (fRenderPasses[i]) {
598 fRenderPasses[i]->unrefAndAbandon();
599 fRenderPasses[i] = nullptr;
600 }
601 }
602}