/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkResourceProvider.h"

#include "src/core/SkTaskGroup.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrSamplerState.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkUniformBuffer.h"
#include "src/gpu/vk/GrVkUtil.h"

#ifdef SK_TRACE_VK_RESOURCES
std::atomic<uint32_t> GrVkResource::fKeyCounter{0};
#endif

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = new PipelineStateCache(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(0 == fExternalRenderPasses.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}

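// Lazily creates the VkPipelineCache, seeding it from the client's persistent cache when a
// previously stored blob matches this device's vendor ID, device ID, and pipeline cache UUID.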
VkPipelineCache GrVkResourceProvider::pipelineCache() {
    if (fPipelineCache == VK_NULL_HANDLE) {
        VkPipelineCacheCreateInfo createInfo;
        memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
        createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;

        auto persistentCache = fGpu->getContext()->priv().getPersistentCache();
        sk_sp<SkData> cached;
        if (persistentCache) {
            uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
            sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));
            cached = persistentCache->load(*keyData);
        }
        bool usedCached = false;
        if (cached) {
            uint32_t* cacheHeader = (uint32_t*)cached->data();
            if (cacheHeader[1] == VK_PIPELINE_CACHE_HEADER_VERSION_ONE) {
                // For version one of the header, the total header size is 16 bytes plus
                // VK_UUID_SIZE bytes. See Section 9.6 (Pipeline Cache) in the Vulkan spec for
                // the breakdown of these bytes.
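                // Header layout for VK_PIPELINE_CACHE_HEADER_VERSION_ONE:
                //     uint32_t headerSize;     // == 16 + VK_UUID_SIZE
                //     uint32_t headerVersion;  // == VK_PIPELINE_CACHE_HEADER_VERSION_ONE
                //     uint32_t vendorID;
                //     uint32_t deviceID;
                //     uint8_t  pipelineCacheUUID[VK_UUID_SIZE];
                // The checks below reject blobs produced by a different device or driver.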
                SkASSERT(cacheHeader[0] == 16 + VK_UUID_SIZE);
                const VkPhysicalDeviceProperties& devProps = fGpu->physicalDeviceProperties();
                const uint8_t* supportedPipelineCacheUUID = devProps.pipelineCacheUUID;
                if (cacheHeader[2] == devProps.vendorID && cacheHeader[3] == devProps.deviceID &&
                    !memcmp(&cacheHeader[4], supportedPipelineCacheUUID, VK_UUID_SIZE)) {
                    createInfo.initialDataSize = cached->size();
                    createInfo.pInitialData = cached->data();
                    usedCached = true;
                }
            }
        }
        if (!usedCached) {
            createInfo.initialDataSize = 0;
            createInfo.pInitialData = nullptr;
        }

        VkResult result;
        GR_VK_CALL_RESULT(fGpu, result, CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                            &fPipelineCache));
        if (VK_SUCCESS != result) {
            fPipelineCache = VK_NULL_HANDLE;
        }
    }
    return fPipelineCache;
}

void GrVkResourceProvider::init() {
    // Init uniform descriptor objects
    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
}

GrVkPipeline* GrVkResourceProvider::createPipeline(const GrProgramInfo& programInfo,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   VkRenderPass compatibleRenderPass,
                                                   VkPipelineLayout layout) {
    return GrVkPipeline::Create(fGpu, programInfo, shaderStageInfo,
                                shaderStageCount, compatibleRenderPass, layout,
                                this->pipelineCache());
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
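// Broadly, per the Vulkan spec's render pass compatibility rules, two render passes are
// compatible (and thus usable with the same framebuffer) when their attachments match in
// format and sample count; differing load/store ops do not affect compatibility.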
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(target)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    GrVkRenderPass* renderPass = GrVkRenderPass::CreateSimple(fGpu, target);
    if (!renderPass) {
        return nullptr;
    }
    fRenderPassArray.emplace_back(renderPass);

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    int index = compatibleHandle.toIndex();
    const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
    SkASSERT(renderPass);
    renderPass->ref();
    return renderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findCompatibleExternalRenderPass(
        VkRenderPass renderPass, uint32_t colorAttachmentIndex) {
    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        if (fExternalRenderPasses[i]->isCompatibleExternalRP(renderPass)) {
            fExternalRenderPasses[i]->ref();
#ifdef SK_DEBUG
            uint32_t cachedColorIndex;
            SkASSERT(fExternalRenderPasses[i]->colorAttachmentIndex(&cachedColorIndex));
            SkASSERT(cachedColorIndex == colorAttachmentIndex);
#endif
            return fExternalRenderPasses[i];
        }
    }

    const GrVkRenderPass* newRenderPass = new GrVkRenderPass(renderPass, colorAttachmentIndex);
    fExternalRenderPasses.push_back(newRenderPass);
    newRenderPass->ref();
    return newRenderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
        GrVkRenderTarget* target,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps,
        CompatibleRPHandle* compatibleHandle) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target->compatibleRenderPassHandle();
    if (!pRPHandle->isValid()) {
        return nullptr;
    }

    return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   stencilOps);
    if (!renderPass) {
        return nullptr;
    }
    renderPass->ref();
    return renderPass;
}

GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
        VkDescriptorType type, uint32_t count) {
    return GrVkDescriptorPool::Create(fGpu, type, count);
}

GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
        GrSamplerState params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
        if (!sampler) {
            return nullptr;
        }
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
        const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSamplerYcbcrConversion* ycbcrConversion =
            fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
    if (!ycbcrConversion) {
        ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
        fYcbcrConversions.add(ycbcrConversion);
    }
    SkASSERT(ycbcrConversion);
    ycbcrConversion->ref();
    return ycbcrConversion;
}

GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        GrRenderTarget* renderTarget,
        const GrProgramInfo& programInfo,
        VkRenderPass compatibleRenderPass) {
    return fPipelineStateCache->refPipelineState(renderTarget, programInfo, compatibleRenderPass);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   uniformHandler);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const SkTArray<uint32_t>& visibilities,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   visibilities);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
                                                                                fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
}

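// Command pools cycle between fActiveCommandPools and fAvailableCommandPools: a pool is handed
// out from the available list (or freshly created), tracked as active while its command buffer
// is recorded and in flight, and returned to the available list by backgroundReset() once the
// GPU has finished with it. fBackgroundMutex guards the available list because
// backgroundReset() may complete on a task group thread.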
GrVkCommandPool* GrVkResourceProvider::findOrCreateCommandPool() {
    std::unique_lock<std::recursive_mutex> lock(fBackgroundMutex);
    GrVkCommandPool* result;
    if (fAvailableCommandPools.count()) {
        result = fAvailableCommandPools.back();
        fAvailableCommandPools.pop_back();
    } else {
        result = GrVkCommandPool::Create(fGpu);
        if (!result) {
            return nullptr;
        }
    }
    SkASSERT(result->unique());
    SkDEBUGCODE(
        for (const GrVkCommandPool* pool : fActiveCommandPools) {
            SkASSERT(pool != result);
        }
        for (const GrVkCommandPool* pool : fAvailableCommandPools) {
            SkASSERT(pool != result);
        }
    )
    fActiveCommandPools.push_back(result);
    result->ref();
    return result;
}

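// Reaps active pools whose primary command buffer has finished executing on the GPU. The loop
// runs backwards because removeShuffle() swaps the removed element with the last one, which
// would otherwise skip entries during forward iteration.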
void GrVkResourceProvider::checkCommandBuffers() {
    for (int i = fActiveCommandPools.count() - 1; i >= 0; --i) {
        GrVkCommandPool* pool = fActiveCommandPools[i];
        if (!pool->isOpen()) {
            GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
            if (buffer->finished(fGpu)) {
                fActiveCommandPools.removeShuffle(i);
                this->backgroundReset(pool);
            }
        }
    }
}

void GrVkResourceProvider::addFinishedProcToActiveCommandBuffers(
        GrGpuFinishedProc finishedProc, GrGpuFinishedContext finishedContext) {
    sk_sp<GrRefCntedCallback> procRef(new GrRefCntedCallback(finishedProc, finishedContext));
    for (int i = 0; i < fActiveCommandPools.count(); ++i) {
        GrVkCommandPool* pool = fActiveCommandPools[i];
        if (!pool->isOpen()) {
            GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
            buffer->addFinishedProc(procRef);
        }
    }
}

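// Standard-size uniform buffer resources are kept on a simple free list:
// findOrCreateStandardUniformBufferResource() pops the last entry (or creates a new resource),
// and recycleStandardUniformBufferResource() pushes returned resources back.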
const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
    const GrVkResource* resource = nullptr;
    int count = fAvailableUniformBufferResources.count();
    if (count > 0) {
        resource = fAvailableUniformBufferResources[count - 1];
        fAvailableUniformBufferResources.removeShuffle(count - 1);
    } else {
        resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
    }
    return resource;
}

void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    fAvailableUniformBufferResources.push_back(resource);
}

void GrVkResourceProvider::destroyResources(bool deviceLost) {
    SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // Loop over all render pass sets to make sure we destroy all the internal VkRenderPasses.
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        fExternalRenderPasses[i]->unref(fGpu);
    }
    fExternalRenderPasses.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash.
    for (decltype(fSamplers)::Iter iter(&fSamplers); !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    for (decltype(fYcbcrConversions)::Iter iter(&fYcbcrConversions); !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fYcbcrConversions.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    for (GrVkCommandPool* pool : fActiveCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fActiveCommandPools.reset();

    for (GrVkCommandPool* pool : fAvailableCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fAvailableCommandPools.reset();

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // Release our uniform buffers.
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}

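// Resetting a command pool can be comparatively expensive, so when the context provides a task
// group the reset is offloaded to it; otherwise it runs synchronously. Either way the pool ends
// up back on fAvailableCommandPools under fBackgroundMutex (see reset() below).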
void GrVkResourceProvider::backgroundReset(GrVkCommandPool* pool) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(pool->unique());
    pool->releaseResources(fGpu);
    SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
    if (taskGroup) {
        taskGroup->add([this, pool]() {
            this->reset(pool);
        });
    } else {
        this->reset(pool);
    }
}

void GrVkResourceProvider::reset(GrVkCommandPool* pool) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(pool->unique());
    pool->reset(fGpu);
    std::unique_lock<std::recursive_mutex> providerLock(fBackgroundMutex);
    fAvailableCommandPools.push_back(pool);
}

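// Uses the standard Vulkan two-call pattern for vkGetPipelineCacheData: the first call passes a
// null data pointer to query the required size, the second fills the allocated buffer. The blob
// is then stored in the client's persistent cache under kPipelineCache_PersistentCacheKeyType,
// to be reloaded by pipelineCache() in a later run.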
void GrVkResourceProvider::storePipelineCacheData() {
    if (this->pipelineCache() == VK_NULL_HANDLE) {
        return;
    }
    size_t dataSize = 0;
    VkResult result;
    GR_VK_CALL_RESULT(fGpu, result, GetPipelineCacheData(fGpu->device(), this->pipelineCache(),
                                                         &dataSize, nullptr));
    if (result != VK_SUCCESS) {
        return;
    }

    std::unique_ptr<uint8_t[]> data(new uint8_t[dataSize]);

    GR_VK_CALL_RESULT(fGpu, result, GetPipelineCacheData(fGpu->device(), this->pipelineCache(),
                                                         &dataSize, (void*)data.get()));
    if (result != VK_SUCCESS) {
        return;
    }

    uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
    sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));

    fGpu->getContext()->priv().getPersistentCache()->store(
            *keyData, *SkData::MakeWithoutCopy(data.get(), dataSize));
}

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(GrVkRenderPass* renderPass)
        : fLastReturnedIndex(0) {
    renderPass->ref();
    fRenderPasses.push_back(renderPass);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
        const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store
    // render pass on creation.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}

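// Searches starting at fLastReturnedIndex on the assumption that consecutive lookups tend to
// request the same load/store ops; when no stored pass matches, a new one is created from the
// set's compatible render pass and cached.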
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
        GrVkGpu* gpu,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = GrVkRenderPass::Create(gpu, *this->getCompatibleRenderPass(),
                                                        colorOps, stencilOps);
    if (!renderPass) {
        return nullptr;
    }
    fRenderPasses.push_back(renderPass);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(GrVkGpu* gpu) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref(gpu);
            fRenderPasses[i] = nullptr;
        }
    }
}