/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkResourceProvider.h"

#include "include/gpu/GrDirectContext.h"
#include "src/core/SkTaskGroup.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrSamplerState.h"
#include "src/gpu/GrStencilSettings.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkUtil.h"

GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    fPipelineStateCache = sk_make_sp<PipelineStateCache>(gpu);
}

GrVkResourceProvider::~GrVkResourceProvider() {
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(0 == fExternalRenderPasses.count());
    SkASSERT(0 == fMSAALoadPipelines.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
}

VkPipelineCache GrVkResourceProvider::pipelineCache() {
    if (fPipelineCache == VK_NULL_HANDLE) {
        VkPipelineCacheCreateInfo createInfo;
        memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
        createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;

        auto persistentCache = fGpu->getContext()->priv().getPersistentCache();
        sk_sp<SkData> cached;
        if (persistentCache) {
            uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
            sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));
            cached = persistentCache->load(*keyData);
        }
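        // Layout of the version-one pipeline cache header that the block below validates
        // (a sketch in C terms; see "Pipeline Cache" in the Vulkan spec):
        //
        //     struct Header {
        //         uint32_t headerSize;     // cacheHeader[0]: 16 + VK_UUID_SIZE
        //         uint32_t headerVersion;  // cacheHeader[1]: VK_PIPELINE_CACHE_HEADER_VERSION_ONE
        //         uint32_t vendorID;       // cacheHeader[2]
        //         uint32_t deviceID;       // cacheHeader[3]
        //         uint8_t  pipelineCacheUUID[VK_UUID_SIZE];  // starts at cacheHeader[4]
        //     };
        //
        // Cached data is only reused when the vendor/device IDs and UUID match this
        // physical device, since a blob built for another device/driver is useless here.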
        bool usedCached = false;
        if (cached) {
            uint32_t* cacheHeader = (uint32_t*)cached->data();
            if (cacheHeader[1] == VK_PIPELINE_CACHE_HEADER_VERSION_ONE) {
                // For version one of the header, the total header size is 16 bytes plus
                // VK_UUID_SIZE bytes. See Section 9.6 (Pipeline Cache) in the Vulkan spec
                // for the breakdown of these bytes.
                SkASSERT(cacheHeader[0] == 16 + VK_UUID_SIZE);
                const VkPhysicalDeviceProperties& devProps = fGpu->physicalDeviceProperties();
                const uint8_t* supportedPipelineCacheUUID = devProps.pipelineCacheUUID;
                if (cacheHeader[2] == devProps.vendorID && cacheHeader[3] == devProps.deviceID &&
                    !memcmp(&cacheHeader[4], supportedPipelineCacheUUID, VK_UUID_SIZE)) {
                    createInfo.initialDataSize = cached->size();
                    createInfo.pInitialData = cached->data();
                    usedCached = true;
                }
            }
        }
        if (!usedCached) {
            createInfo.initialDataSize = 0;
            createInfo.pInitialData = nullptr;
        }

        VkResult result;
        GR_VK_CALL_RESULT(fGpu, result, CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
                                                            &fPipelineCache));
        if (VK_SUCCESS != result) {
            fPipelineCache = VK_NULL_HANDLE;
        }
    }
    return fPipelineCache;
}

void GrVkResourceProvider::init() {
    // Init the uniform and input descriptor set managers. They always occupy the first
    // two slots of fDescriptorSetManagers, so we can hand out fixed handles for them.
    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(1 == fDescriptorSetManagers.count());
    fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
    dsm = GrVkDescriptorSetManager::CreateInputManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    SkASSERT(2 == fDescriptorSetManagers.count());
    fInputDSHandle = GrVkDescriptorSetManager::Handle(1);
}

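// Note: pipelines made here share this provider's VkPipelineCache (passed to
// GrVkPipeline::Make below), so the driver can reuse previously compiled state
// across programs.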
sk_sp<const GrVkPipeline> GrVkResourceProvider::makePipeline(
        const GrProgramInfo& programInfo,
        VkPipelineShaderStageCreateInfo* shaderStageInfo,
        int shaderStageCount,
        VkRenderPass compatibleRenderPass,
        VkPipelineLayout layout,
        uint32_t subpass) {
    return GrVkPipeline::Make(fGpu, programInfo, shaderStageInfo, shaderStageCount,
                              compatibleRenderPass, layout, this->pipelineCache(), subpass);
}

// To create framebuffers, we first need to create a simple RenderPass that is
// only used for framebuffer creation. When we actually render we will create
// RenderPasses as needed that are compatible with the framebuffer.
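// In Vulkan terms, two render passes are compatible (and thus usable with the same
// framebuffer) when their attachments match in count, format, and sample count;
// load/store ops do not affect compatibility. That is why a single simple render
// pass per attachment configuration is sufficient here.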
const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(GrVkRenderTarget* target,
                                               CompatibleRPHandle* compatibleHandle,
                                               bool withResolve,
                                               bool withStencil,
                                               SelfDependencyFlags selfDepFlags,
                                               LoadFromResolve loadFromResolve) {
    // Get attachment information from the render target. This includes which attachments
    // the render target has (color, stencil) and each attachment's format and sample count.
    GrVkRenderPass::AttachmentFlags attachmentFlags;
    GrVkRenderPass::AttachmentsDescriptor attachmentsDesc;
    target->getAttachmentsDescriptor(&attachmentsDesc, &attachmentFlags, withResolve, withStencil);

    return this->findCompatibleRenderPass(&attachmentsDesc, attachmentFlags, selfDepFlags,
                                          loadFromResolve, compatibleHandle);
}

const GrVkRenderPass*
GrVkResourceProvider::findCompatibleRenderPass(GrVkRenderPass::AttachmentsDescriptor* desc,
                                               GrVkRenderPass::AttachmentFlags attachmentFlags,
                                               SelfDependencyFlags selfDepFlags,
                                               LoadFromResolve loadFromResolve,
                                               CompatibleRPHandle* compatibleHandle) {
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        if (fRenderPassArray[i].isCompatible(*desc, attachmentFlags, selfDepFlags,
                                             loadFromResolve)) {
            const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
            renderPass->ref();
            if (compatibleHandle) {
                *compatibleHandle = CompatibleRPHandle(i);
            }
            return renderPass;
        }
    }

    GrVkRenderPass* renderPass = GrVkRenderPass::CreateSimple(fGpu, desc, attachmentFlags,
                                                              selfDepFlags, loadFromResolve);
    if (!renderPass) {
        return nullptr;
    }
    fRenderPassArray.emplace_back(renderPass);

    if (compatibleHandle) {
        *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
    }
    return renderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findCompatibleExternalRenderPass(
        VkRenderPass renderPass, uint32_t colorAttachmentIndex) {
    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        if (fExternalRenderPasses[i]->isCompatibleExternalRP(renderPass)) {
            fExternalRenderPasses[i]->ref();
#ifdef SK_DEBUG
            uint32_t cachedColorIndex;
            SkASSERT(fExternalRenderPasses[i]->colorAttachmentIndex(&cachedColorIndex));
            SkASSERT(cachedColorIndex == colorAttachmentIndex);
#endif
            return fExternalRenderPasses[i];
        }
    }

    const GrVkRenderPass* newRenderPass = new GrVkRenderPass(fGpu, renderPass,
                                                             colorAttachmentIndex);
    fExternalRenderPasses.push_back(newRenderPass);
    newRenderPass->ref();
    return newRenderPass;
}

const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
        GrVkRenderTarget* target,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& resolveOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps,
        CompatibleRPHandle* compatibleHandle,
        bool withResolve,
        bool withStencil,
        SelfDependencyFlags selfDepFlags,
        LoadFromResolve loadFromResolve) {
    GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
    GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
                                                                           : &tempRPHandle;
    *pRPHandle = target->compatibleRenderPassHandle(withResolve, withStencil, selfDepFlags,
                                                    loadFromResolve);
    if (!pRPHandle->isValid()) {
        return nullptr;
    }

    return this->findRenderPass(*pRPHandle, colorOps, resolveOps, stencilOps);
}

const GrVkRenderPass*
GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
                                     const GrVkRenderPass::LoadStoreOps& colorOps,
                                     const GrVkRenderPass::LoadStoreOps& resolveOps,
                                     const GrVkRenderPass::LoadStoreOps& stencilOps) {
    SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
    CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
    const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
                                                                   colorOps,
                                                                   resolveOps,
                                                                   stencilOps);
    if (!renderPass) {
        return nullptr;
    }
    renderPass->ref();
    return renderPass;
}

GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
        VkDescriptorType type, uint32_t count) {
    return GrVkDescriptorPool::Create(fGpu, type, count);
}

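// Samplers are cached in a hash table keyed on the sampler state plus any Y'CbCr
// conversion info. The returned sampler carries an extra ref that the caller is
// responsible for removing.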
GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
        GrSamplerState params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
    if (!sampler) {
        sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
        if (!sampler) {
            return nullptr;
        }
        fSamplers.add(sampler);
    }
    SkASSERT(sampler);
    sampler->ref();
    return sampler;
}

GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
        const GrVkYcbcrConversionInfo& ycbcrInfo) {
    GrVkSamplerYcbcrConversion* ycbcrConversion =
            fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
    if (!ycbcrConversion) {
        ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
        fYcbcrConversions.add(ycbcrConversion);
    }
    SkASSERT(ycbcrConversion);
    ycbcrConversion->ref();
    return ycbcrConversion;
}

GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        GrRenderTarget* renderTarget,
        const GrProgramInfo& programInfo,
        VkRenderPass compatibleRenderPass,
        bool overrideSubpassForResolveLoad) {
    return fPipelineStateCache->findOrCreatePipelineState(renderTarget, programInfo,
                                                          compatibleRenderPass,
                                                          overrideSubpassForResolveLoad);
}

GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        const GrProgramDesc& desc,
        const GrProgramInfo& programInfo,
        VkRenderPass compatibleRenderPass,
        GrThreadSafePipelineBuilder::Stats::ProgramCacheResult* stat) {

    auto tmp = fPipelineStateCache->findOrCreatePipelineState(desc, programInfo,
                                                              compatibleRenderPass, stat);
    if (!tmp) {
        fPipelineStateCache->stats()->incNumPreCompilationFailures();
    } else {
        fPipelineStateCache->stats()->incNumPreProgramCacheResult(*stat);
    }

    return tmp;
}

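// Pipelines used to load MSAA attachments are cached keyed on a compatible render
// pass, since a VkPipeline may only be used with render passes compatible with the
// one it was created against.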
sk_sp<const GrVkPipeline> GrVkResourceProvider::findOrCreateMSAALoadPipeline(
        const GrVkRenderPass& renderPass,
        int numSamples,
        VkPipelineShaderStageCreateInfo* shaderStageInfo,
        VkPipelineLayout pipelineLayout) {
    // Find or create a compatible pipeline
    sk_sp<const GrVkPipeline> pipeline;
    for (int i = 0; i < fMSAALoadPipelines.count() && !pipeline; ++i) {
        if (fMSAALoadPipelines[i].fRenderPass->isCompatible(renderPass)) {
            pipeline = fMSAALoadPipelines[i].fPipeline;
        }
    }
    if (!pipeline) {
        pipeline = GrVkPipeline::Make(
                fGpu,
                /*vertexAttribs=*/GrGeometryProcessor::AttributeSet(),
                /*instanceAttribs=*/GrGeometryProcessor::AttributeSet(),
                GrPrimitiveType::kTriangleStrip,
                kTopLeft_GrSurfaceOrigin,
                GrStencilSettings(),
                numSamples,
                /*isHWAntialiasState=*/false,
                GrXferProcessor::BlendInfo(),
                /*isWireframe=*/false,
                /*useConservativeRaster=*/false,
                /*subpass=*/0,
                shaderStageInfo,
                /*shaderStageCount=*/2,
                renderPass.vkRenderPass(),
                pipelineLayout,
                /*ownsLayout=*/false,
                this->pipelineCache());
        if (!pipeline) {
            return nullptr;
        }
        fMSAALoadPipelines.push_back({pipeline, &renderPass});
    }
    SkASSERT(pipeline);
    return pipeline;
}

void GrVkResourceProvider::getZeroSamplerDescriptorSetHandle(
        GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isZeroSampler()) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm =
            GrVkDescriptorSetManager::CreateZeroSamplerManager(fGpu);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
                                                         const GrVkUniformHandler& uniformHandler,
                                                         GrVkDescriptorSetManager::Handle* handle) {
    SkASSERT(handle);
    SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
             VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
            *handle = GrVkDescriptorSetManager::Handle(i);
            return;
        }
    }

    GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
                                                                                   uniformHandler);
    fDescriptorSetManagers.emplace_back(dsm);
    *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
}

VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getInputDSLayout() const {
    SkASSERT(fInputDSHandle.isValid());
    return fDescriptorSetManagers[fInputDSHandle.toIndex()]->layout();
}

VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
        const GrVkDescriptorSetManager::Handle& handle) const {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->layout();
}

const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
    SkASSERT(fUniformDSHandle.isValid());
    return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
                                                                                fUniformDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getInputDescriptorSet() {
    SkASSERT(fInputDSHandle.isValid());
    return fDescriptorSetManagers[fInputDSHandle.toIndex()]->getDescriptorSet(fGpu, fInputDSHandle);
}

const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
        const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(handle.isValid());
    return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
}

void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
                                                const GrVkDescriptorSetManager::Handle& handle) {
    SkASSERT(descSet);
    SkASSERT(handle.isValid());
    int managerIdx = handle.toIndex();
    SkASSERT(managerIdx < fDescriptorSetManagers.count());
    fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
}

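// Command pools are recycled: once their work completes they are reset on a
// background thread and returned to fAvailableCommandPools (see backgroundReset()
// below), so that list must be accessed under fBackgroundMutex.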
GrVkCommandPool* GrVkResourceProvider::findOrCreateCommandPool() {
    SkAutoMutexExclusive lock(fBackgroundMutex);
    GrVkCommandPool* result;
    if (fAvailableCommandPools.count()) {
        result = fAvailableCommandPools.back();
        fAvailableCommandPools.pop_back();
    } else {
        result = GrVkCommandPool::Create(fGpu);
        if (!result) {
            return nullptr;
        }
    }
    SkASSERT(result->unique());
    SkDEBUGCODE(
        for (const GrVkCommandPool* pool : fActiveCommandPools) {
            SkASSERT(pool != result);
        }
        for (const GrVkCommandPool* pool : fAvailableCommandPools) {
            SkASSERT(pool != result);
        }
    )
    fActiveCommandPools.push_back(result);
    result->ref();
    return result;
}

void GrVkResourceProvider::checkCommandBuffers() {
    // When resetting a command buffer it can trigger client provided procs (e.g. release or
    // finished) to be called. During these calls the client could trigger us to abandon the vk
    // context, e.g. if we are in a DEVICE_LOST state. When we abandon the vk context we will
    // unref all the fActiveCommandPools and reset the array. Since this can happen in the middle
    // of the loop here, we need to additionally check that fActiveCommandPools still has pools on
    // each iteration.
    //
    // TODO: We really need to have a more robust way to protect us from client proc calls that
    // happen in the middle of us doing work. This may be just one of many potential pitfalls that
    // could happen from the client triggering GrDirectContext changes during a proc call.
    for (int i = fActiveCommandPools.count() - 1; fActiveCommandPools.count() && i >= 0; --i) {
        GrVkCommandPool* pool = fActiveCommandPools[i];
        if (!pool->isOpen()) {
            GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
            if (buffer->finished(fGpu)) {
                fActiveCommandPools.removeShuffle(i);
                // This passes ownership of the pool to the backgroundReset call. The pool should
                // not be used again from this function.
                // TODO: We should see if we can use sk_sps here to make this more explicit.
                this->backgroundReset(pool);
            }
        }
    }
}

void GrVkResourceProvider::forceSyncAllCommandBuffers() {
    for (int i = fActiveCommandPools.count() - 1; fActiveCommandPools.count() && i >= 0; --i) {
        GrVkCommandPool* pool = fActiveCommandPools[i];
        if (!pool->isOpen()) {
            GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
            buffer->forceSync(fGpu);
        }
    }
}

void GrVkResourceProvider::addFinishedProcToActiveCommandBuffers(
        sk_sp<GrRefCntedCallback> finishedCallback) {
    for (int i = 0; i < fActiveCommandPools.count(); ++i) {
        GrVkCommandPool* pool = fActiveCommandPools[i];
        GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
        buffer->addFinishedProc(finishedCallback);
    }
}

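// Tear-down note: we wait on the task group first so no backgroundReset() can race
// with the destruction below, and the descriptor set managers are released last
// (see the ordering comment near the bottom of this function).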
void GrVkResourceProvider::destroyResources() {
    SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // Release all msaa load pipelines
    fMSAALoadPipelines.reset();

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources();
    }
    fRenderPassArray.reset();

    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        fExternalRenderPasses[i]->unref();
    }
    fExternalRenderPasses.reset();

    // Iterate through all stored GrVkSamplers and unref them before resetting the hash table.
    fSamplers.foreach([&](auto* elt) { elt->unref(); });
    fSamplers.reset();

    fYcbcrConversions.foreach([&](auto* elt) { elt->unref(); });
    fYcbcrConversions.reset();

    fPipelineStateCache->release();

    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    for (GrVkCommandPool* pool : fActiveCommandPools) {
        SkASSERT(pool->unique());
        pool->unref();
    }
    fActiveCommandPools.reset();

    {
        SkAutoMutexExclusive lock(fBackgroundMutex);
        for (GrVkCommandPool* pool : fAvailableCommandPools) {
            SkASSERT(pool->unique());
            pool->unref();
        }
        fAvailableCommandPools.reset();
    }

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers. Additionally, we must release all uniform buffers since they hold
    // refs to GrVkDescriptorSets.
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();
}

void GrVkResourceProvider::releaseUnlockedBackendObjects() {
    SkAutoMutexExclusive lock(fBackgroundMutex);
    for (GrVkCommandPool* pool : fAvailableCommandPools) {
        SkASSERT(pool->unique());
        pool->unref();
    }
    fAvailableCommandPools.reset();
}

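// Takes ownership of a pool whose work has finished (see checkCommandBuffers above).
// The reset itself runs on a SkTaskGroup thread when one is available, so the
// expensive reset does not have to run on the calling thread.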
void GrVkResourceProvider::backgroundReset(GrVkCommandPool* pool) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(pool->unique());
    pool->releaseResources();
    // After releasing resources we may have called a client callback proc which may have
    // disconnected the GrVkGpu. In that case we do not want to push the pool back onto the cache,
    // but instead just drop the pool.
    if (fGpu->disconnected()) {
        pool->unref();
        return;
    }
    SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
    if (taskGroup) {
        taskGroup->add([this, pool]() {
            this->reset(pool);
        });
    } else {
        this->reset(pool);
    }
}

void GrVkResourceProvider::reset(GrVkCommandPool* pool) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(pool->unique());
    pool->reset(fGpu);
    SkAutoMutexExclusive lock(fBackgroundMutex);
    fAvailableCommandPools.push_back(pool);
}

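// Persisting the pipeline cache uses the standard two-call Vulkan pattern: query
// vkGetPipelineCacheData with a null data pointer to learn the size, then call it
// again with a buffer of that size to fetch the actual data.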
void GrVkResourceProvider::storePipelineCacheData() {
    if (this->pipelineCache() == VK_NULL_HANDLE) {
        return;
    }
    size_t dataSize = 0;
    VkResult result;
    GR_VK_CALL_RESULT(fGpu, result, GetPipelineCacheData(fGpu->device(), this->pipelineCache(),
                                                         &dataSize, nullptr));
    if (result != VK_SUCCESS) {
        return;
    }

    std::unique_ptr<uint8_t[]> data(new uint8_t[dataSize]);

    GR_VK_CALL_RESULT(fGpu, result, GetPipelineCacheData(fGpu->device(), this->pipelineCache(),
                                                         &dataSize, (void*)data.get()));
    if (result != VK_SUCCESS) {
        return;
    }

    uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
    sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));

    fGpu->getContext()->priv().getPersistentCache()->store(
            *keyData, *SkData::MakeWithoutCopy(data.get(), dataSize), SkString("VkPipelineCache"));
}

////////////////////////////////////////////////////////////////////////////////

GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(GrVkRenderPass* renderPass)
        : fLastReturnedIndex(0) {
    renderPass->ref();
    fRenderPasses.push_back(renderPass);
}

bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
        const GrVkRenderPass::AttachmentsDescriptor& attachmentsDescriptor,
        GrVkRenderPass::AttachmentFlags attachmentFlags,
        SelfDependencyFlags selfDepFlags,
        LoadFromResolve loadFromResolve) const {
    // The first GrVkRenderPass should always exist since we create the basic load/store
    // render pass on creation.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(attachmentsDescriptor, attachmentFlags, selfDepFlags,
                                          loadFromResolve);
}

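// The search below starts at fLastReturnedIndex since consecutive lookups commonly
// request the same load/store ops, which makes the common case a single comparison.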
GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
        GrVkGpu* gpu,
        const GrVkRenderPass::LoadStoreOps& colorOps,
        const GrVkRenderPass::LoadStoreOps& resolveOps,
        const GrVkRenderPass::LoadStoreOps& stencilOps) {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
        if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, resolveOps, stencilOps)) {
            fLastReturnedIndex = idx;
            return fRenderPasses[idx];
        }
    }
    GrVkRenderPass* renderPass = GrVkRenderPass::Create(gpu, *this->getCompatibleRenderPass(),
                                                        colorOps, resolveOps, stencilOps);
    if (!renderPass) {
        return nullptr;
    }
    fRenderPasses.push_back(renderPass);
    fLastReturnedIndex = fRenderPasses.count() - 1;
    return renderPass;
}

void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources() {
    for (int i = 0; i < fRenderPasses.count(); ++i) {
        if (fRenderPasses[i]) {
            fRenderPasses[i]->unref();
            fRenderPasses[i] = nullptr;
        }
    }
}