blob: c86902923b7a330a7952ab639d07a9917c5f52e5 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2016 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkResourceProvider.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/core/SkTaskGroup.h"
11#include "src/gpu/GrContextPriv.h"
Brian Salomon201cdbb2019-08-14 17:00:30 -040012#include "src/gpu/GrSamplerState.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/vk/GrVkCommandBuffer.h"
14#include "src/gpu/vk/GrVkCommandPool.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/vk/GrVkGpu.h"
16#include "src/gpu/vk/GrVkPipeline.h"
17#include "src/gpu/vk/GrVkRenderTarget.h"
18#include "src/gpu/vk/GrVkUniformBuffer.h"
19#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050020
21#ifdef SK_TRACE_VK_RESOURCES
Mike Klein820e79b2018-12-04 09:31:31 -050022std::atomic<uint32_t> GrVkResource::fKeyCounter{0};
Greg Daniel164a9f02016-02-22 09:56:40 -050023#endif
24
GrVkResourceProvider::GrVkResourceProvider(GrVkGpu* gpu)
    : fGpu(gpu)
    , fPipelineCache(VK_NULL_HANDLE) {
    // The pipeline-state cache is created eagerly; the VkPipelineCache handle
    // itself is created lazily in pipelineCache().
    fPipelineStateCache = new PipelineStateCache(gpu);
}
30
GrVkResourceProvider::~GrVkResourceProvider() {
    // destroyResources()/abandonResources() must have run before destruction:
    // they empty these arrays and null out the VkPipelineCache handle.
    SkASSERT(0 == fRenderPassArray.count());
    SkASSERT(0 == fExternalRenderPasses.count());
    SkASSERT(VK_NULL_HANDLE == fPipelineCache);
    delete fPipelineStateCache;
}
37
Greg Daniela870b462019-01-08 15:49:46 -050038VkPipelineCache GrVkResourceProvider::pipelineCache() {
39 if (fPipelineCache == VK_NULL_HANDLE) {
40 VkPipelineCacheCreateInfo createInfo;
41 memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
42 createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
43 createInfo.pNext = nullptr;
44 createInfo.flags = 0;
egdaniel778555c2016-05-02 06:50:36 -070045
Robert Phillips9da87e02019-02-04 13:26:26 -050046 auto persistentCache = fGpu->getContext()->priv().getPersistentCache();
Greg Daniela870b462019-01-08 15:49:46 -050047 sk_sp<SkData> cached;
48 if (persistentCache) {
49 uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
50 sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));
51 cached = persistentCache->load(*keyData);
52 }
53 bool usedCached = false;
54 if (cached) {
55 uint32_t* cacheHeader = (uint32_t*)cached->data();
56 if (cacheHeader[1] == VK_PIPELINE_CACHE_HEADER_VERSION_ONE) {
57 // For version one of the header, the total header size is 16 bytes plus
58 // VK_UUID_SIZE bytes. See Section 9.6 (Pipeline Cache) in the vulkan spec to see
59 // the breakdown of these bytes.
60 SkASSERT(cacheHeader[0] == 16 + VK_UUID_SIZE);
61 const VkPhysicalDeviceProperties& devProps = fGpu->physicalDeviceProperties();
62 const uint8_t* supportedPipelineCacheUUID = devProps.pipelineCacheUUID;
63 if (cacheHeader[2] == devProps.vendorID && cacheHeader[3] == devProps.deviceID &&
64 !memcmp(&cacheHeader[4], supportedPipelineCacheUUID, VK_UUID_SIZE)) {
65 createInfo.initialDataSize = cached->size();
66 createInfo.pInitialData = cached->data();
67 usedCached = true;
68 }
69 }
70 }
71 if (!usedCached) {
72 createInfo.initialDataSize = 0;
73 createInfo.pInitialData = nullptr;
74 }
75 VkResult result = GR_VK_CALL(fGpu->vkInterface(),
76 CreatePipelineCache(fGpu->device(), &createInfo, nullptr,
77 &fPipelineCache));
78 SkASSERT(VK_SUCCESS == result);
79 if (VK_SUCCESS != result) {
80 fPipelineCache = VK_NULL_HANDLE;
81 }
82 }
83 return fPipelineCache;
84}
85
86void GrVkResourceProvider::init() {
egdaniel707bbd62016-07-26 07:19:47 -070087 // Init uniform descriptor objects
Greg Daniel18f96022017-05-04 15:09:03 -040088 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateUniformManager(fGpu);
89 fDescriptorSetManagers.emplace_back(dsm);
egdaniel707bbd62016-07-26 07:19:47 -070090 SkASSERT(1 == fDescriptorSetManagers.count());
91 fUniformDSHandle = GrVkDescriptorSetManager::Handle(0);
Greg Daniel164a9f02016-02-22 09:56:40 -050092}
93
GrVkPipeline* GrVkResourceProvider::createPipeline(const GrProgramInfo& programInfo,
                                                   const GrStencilSettings& stencil,
                                                   VkPipelineShaderStageCreateInfo* shaderStageInfo,
                                                   int shaderStageCount,
                                                   GrPrimitiveType primitiveType,
                                                   VkRenderPass compatibleRenderPass,
                                                   VkPipelineLayout layout) {
    // Thin forwarder to GrVkPipeline::Create; supplies the shared VkPipelineCache
    // so pipeline construction can reuse previously compiled state.
    return GrVkPipeline::Create(fGpu, programInfo, stencil, shaderStageInfo,
                                shaderStageCount, primitiveType, compatibleRenderPass,
                                layout, this->pipelineCache());
}
105
Greg Daniel164a9f02016-02-22 09:56:40 -0500106// To create framebuffers, we first need to create a simple RenderPass that is
halcanary9d524f22016-03-29 09:03:52 -0700107// only used for framebuffer creation. When we actually render we will create
Greg Daniel164a9f02016-02-22 09:56:40 -0500108// RenderPasses as needed that are compatible with the framebuffer.
halcanary9d524f22016-03-29 09:03:52 -0700109const GrVkRenderPass*
egdanield62e28b2016-06-07 08:43:30 -0700110GrVkResourceProvider::findCompatibleRenderPass(const GrVkRenderTarget& target,
111 CompatibleRPHandle* compatibleHandle) {
112 for (int i = 0; i < fRenderPassArray.count(); ++i) {
113 if (fRenderPassArray[i].isCompatible(target)) {
114 const GrVkRenderPass* renderPass = fRenderPassArray[i].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500115 renderPass->ref();
egdanield62e28b2016-06-07 08:43:30 -0700116 if (compatibleHandle) {
117 *compatibleHandle = CompatibleRPHandle(i);
118 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500119 return renderPass;
120 }
121 }
122
egdanield62e28b2016-06-07 08:43:30 -0700123 const GrVkRenderPass* renderPass =
124 fRenderPassArray.emplace_back(fGpu, target).getCompatibleRenderPass();
125 renderPass->ref();
126
127 if (compatibleHandle) {
128 *compatibleHandle = CompatibleRPHandle(fRenderPassArray.count() - 1);
129 }
130 return renderPass;
131}
132
133const GrVkRenderPass*
134GrVkResourceProvider::findCompatibleRenderPass(const CompatibleRPHandle& compatibleHandle) {
135 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
136 int index = compatibleHandle.toIndex();
137 const GrVkRenderPass* renderPass = fRenderPassArray[index].getCompatibleRenderPass();
Greg Daniel164a9f02016-02-22 09:56:40 -0500138 renderPass->ref();
139 return renderPass;
140}
141
Greg Danielb46add82019-01-02 14:51:29 -0500142const GrVkRenderPass* GrVkResourceProvider::findCompatibleExternalRenderPass(
143 VkRenderPass renderPass, uint32_t colorAttachmentIndex) {
144 for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
145 if (fExternalRenderPasses[i]->isCompatibleExternalRP(renderPass)) {
146 fExternalRenderPasses[i]->ref();
147#ifdef SK_DEBUG
148 uint32_t cachedColorIndex;
149 SkASSERT(fExternalRenderPasses[i]->colorAttachmentIndex(&cachedColorIndex));
150 SkASSERT(cachedColorIndex == colorAttachmentIndex);
151#endif
152 return fExternalRenderPasses[i];
153 }
154 }
155
156 const GrVkRenderPass* newRenderPass = new GrVkRenderPass(renderPass, colorAttachmentIndex);
157 fExternalRenderPasses.push_back(newRenderPass);
158 newRenderPass->ref();
159 return newRenderPass;
160}
161
egdaniel2feb0932016-06-08 06:48:09 -0700162const GrVkRenderPass* GrVkResourceProvider::findRenderPass(
Greg Danielfa3adf72019-11-07 09:53:41 -0500163 GrVkRenderTarget* target,
egdaniel2feb0932016-06-08 06:48:09 -0700164 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700165 const GrVkRenderPass::LoadStoreOps& stencilOps,
166 CompatibleRPHandle* compatibleHandle) {
egdaniel066df7c2016-06-08 14:02:27 -0700167 GrVkResourceProvider::CompatibleRPHandle tempRPHandle;
168 GrVkResourceProvider::CompatibleRPHandle* pRPHandle = compatibleHandle ? compatibleHandle
169 : &tempRPHandle;
Greg Danielfa3adf72019-11-07 09:53:41 -0500170 *pRPHandle = target->compatibleRenderPassHandle();
egdaniel066df7c2016-06-08 14:02:27 -0700171
egdaniel2feb0932016-06-08 06:48:09 -0700172 // This will get us the handle to (and possible create) the compatible set for the specific
173 // GrVkRenderPass we are looking for.
Greg Danielfa3adf72019-11-07 09:53:41 -0500174 this->findCompatibleRenderPass(*target, compatibleHandle);
Greg Danield3682112016-10-03 15:06:07 -0400175 return this->findRenderPass(*pRPHandle, colorOps, stencilOps);
egdaniel2feb0932016-06-08 06:48:09 -0700176}
177
178const GrVkRenderPass*
179GrVkResourceProvider::findRenderPass(const CompatibleRPHandle& compatibleHandle,
180 const GrVkRenderPass::LoadStoreOps& colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700181 const GrVkRenderPass::LoadStoreOps& stencilOps) {
182 SkASSERT(compatibleHandle.isValid() && compatibleHandle.toIndex() < fRenderPassArray.count());
183 CompatibleRenderPassSet& compatibleSet = fRenderPassArray[compatibleHandle.toIndex()];
184 const GrVkRenderPass* renderPass = compatibleSet.getRenderPass(fGpu,
185 colorOps,
egdaniel2feb0932016-06-08 06:48:09 -0700186 stencilOps);
187 renderPass->ref();
188 return renderPass;
189}
190
GrVkDescriptorPool* GrVkResourceProvider::findOrCreateCompatibleDescriptorPool(
        VkDescriptorType type, uint32_t count) {
    // NOTE(review): despite the "findOrCreate" name, this always delegates to
    // GrVkDescriptorPool::Create — confirm whether any reuse happens inside Create.
    return GrVkDescriptorPool::Create(fGpu, type, count);
}
195
Greg Daniel7e000222018-12-03 10:08:21 -0500196GrVkSampler* GrVkResourceProvider::findOrCreateCompatibleSampler(
197 const GrSamplerState& params, const GrVkYcbcrConversionInfo& ycbcrInfo) {
198 GrVkSampler* sampler = fSamplers.find(GrVkSampler::GenerateKey(params, ycbcrInfo));
egdaniel8b6394c2016-03-04 07:35:10 -0800199 if (!sampler) {
Greg Daniel7e000222018-12-03 10:08:21 -0500200 sampler = GrVkSampler::Create(fGpu, params, ycbcrInfo);
201 if (!sampler) {
202 return nullptr;
203 }
egdaniel8b6394c2016-03-04 07:35:10 -0800204 fSamplers.add(sampler);
205 }
206 SkASSERT(sampler);
207 sampler->ref();
208 return sampler;
209}
210
Greg Daniel7e000222018-12-03 10:08:21 -0500211GrVkSamplerYcbcrConversion* GrVkResourceProvider::findOrCreateCompatibleSamplerYcbcrConversion(
212 const GrVkYcbcrConversionInfo& ycbcrInfo) {
213 GrVkSamplerYcbcrConversion* ycbcrConversion =
214 fYcbcrConversions.find(GrVkSamplerYcbcrConversion::GenerateKey(ycbcrInfo));
215 if (!ycbcrConversion) {
216 ycbcrConversion = GrVkSamplerYcbcrConversion::Create(fGpu, ycbcrInfo);
217 if (!ycbcrConversion) {
218 return nullptr;
219 }
220 fYcbcrConversions.add(ycbcrConversion);
221 }
222 SkASSERT(ycbcrConversion);
223 ycbcrConversion->ref();
224 return ycbcrConversion;
225}
226
GrVkPipelineState* GrVkResourceProvider::findOrCreateCompatiblePipelineState(
        GrRenderTarget* renderTarget,
        const GrProgramInfo& programInfo,
        GrPrimitiveType primitiveType,
        VkRenderPass compatibleRenderPass) {
    // Delegates to the pipeline state cache. Per the "ref" in refPipelineState,
    // the returned object presumably carries a ref for the caller — confirm in
    // PipelineStateCache.
    return fPipelineStateCache->refPipelineState(renderTarget, programInfo,
                                                 primitiveType, compatibleRenderPass);
}
235
Greg Daniela7543782017-05-02 14:01:43 -0400236void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
237 const GrVkUniformHandler& uniformHandler,
egdaniel707bbd62016-07-26 07:19:47 -0700238 GrVkDescriptorSetManager::Handle* handle) {
egdaniela95220d2016-07-21 11:50:37 -0700239 SkASSERT(handle);
Greg Daniela7543782017-05-02 14:01:43 -0400240 SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
241 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
egdaniela95220d2016-07-21 11:50:37 -0700242 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400243 if (fDescriptorSetManagers[i]->isCompatible(type, &uniformHandler)) {
egdaniela95220d2016-07-21 11:50:37 -0700244 *handle = GrVkDescriptorSetManager::Handle(i);
245 return;
246 }
247 }
248
Greg Daniel18f96022017-05-04 15:09:03 -0400249 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
250 uniformHandler);
251 fDescriptorSetManagers.emplace_back(dsm);
egdaniela95220d2016-07-21 11:50:37 -0700252 *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
253}
254
Greg Daniela7543782017-05-02 14:01:43 -0400255void GrVkResourceProvider::getSamplerDescriptorSetHandle(VkDescriptorType type,
256 const SkTArray<uint32_t>& visibilities,
egdaniel4d866df2016-08-25 13:52:00 -0700257 GrVkDescriptorSetManager::Handle* handle) {
258 SkASSERT(handle);
Greg Daniela7543782017-05-02 14:01:43 -0400259 SkASSERT(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
260 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type);
egdaniel4d866df2016-08-25 13:52:00 -0700261 for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
Greg Daniel18f96022017-05-04 15:09:03 -0400262 if (fDescriptorSetManagers[i]->isCompatible(type, visibilities)) {
egdaniel4d866df2016-08-25 13:52:00 -0700263 *handle = GrVkDescriptorSetManager::Handle(i);
264 return;
265 }
266 }
267
Greg Daniel18f96022017-05-04 15:09:03 -0400268 GrVkDescriptorSetManager* dsm = GrVkDescriptorSetManager::CreateSamplerManager(fGpu, type,
269 visibilities);
270 fDescriptorSetManagers.emplace_back(dsm);
egdaniel4d866df2016-08-25 13:52:00 -0700271 *handle = GrVkDescriptorSetManager::Handle(fDescriptorSetManagers.count() - 1);
272}
273
egdaniel707bbd62016-07-26 07:19:47 -0700274VkDescriptorSetLayout GrVkResourceProvider::getUniformDSLayout() const {
275 SkASSERT(fUniformDSHandle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400276 return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->layout();
egdaniel707bbd62016-07-26 07:19:47 -0700277}
278
279VkDescriptorSetLayout GrVkResourceProvider::getSamplerDSLayout(
280 const GrVkDescriptorSetManager::Handle& handle) const {
281 SkASSERT(handle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400282 return fDescriptorSetManagers[handle.toIndex()]->layout();
egdaniel707bbd62016-07-26 07:19:47 -0700283}
284
egdaniela95220d2016-07-21 11:50:37 -0700285const GrVkDescriptorSet* GrVkResourceProvider::getUniformDescriptorSet() {
286 SkASSERT(fUniformDSHandle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400287 return fDescriptorSetManagers[fUniformDSHandle.toIndex()]->getDescriptorSet(fGpu,
288 fUniformDSHandle);
egdaniela95220d2016-07-21 11:50:37 -0700289}
290
egdaniel707bbd62016-07-26 07:19:47 -0700291const GrVkDescriptorSet* GrVkResourceProvider::getSamplerDescriptorSet(
292 const GrVkDescriptorSetManager::Handle& handle) {
293 SkASSERT(handle.isValid());
Greg Daniel18f96022017-05-04 15:09:03 -0400294 return fDescriptorSetManagers[handle.toIndex()]->getDescriptorSet(fGpu, handle);
egdaniel707bbd62016-07-26 07:19:47 -0700295}
egdaniela95220d2016-07-21 11:50:37 -0700296
297void GrVkResourceProvider::recycleDescriptorSet(const GrVkDescriptorSet* descSet,
298 const GrVkDescriptorSetManager::Handle& handle) {
299 SkASSERT(descSet);
300 SkASSERT(handle.isValid());
301 int managerIdx = handle.toIndex();
302 SkASSERT(managerIdx < fDescriptorSetManagers.count());
Greg Daniel18f96022017-05-04 15:09:03 -0400303 fDescriptorSetManagers[managerIdx]->recycleDescriptorSet(descSet);
egdaniel778555c2016-05-02 06:50:36 -0700304}
305
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500306GrVkCommandPool* GrVkResourceProvider::findOrCreateCommandPool() {
307 std::unique_lock<std::recursive_mutex> lock(fBackgroundMutex);
308 GrVkCommandPool* result;
309 if (fAvailableCommandPools.count()) {
310 result = fAvailableCommandPools.back();
311 fAvailableCommandPools.pop_back();
jvanverth7ec92412016-07-06 09:24:57 -0700312 } else {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500313 result = GrVkCommandPool::Create(fGpu);
Greg Daniel9b63dc82019-11-06 09:21:55 -0500314 if (!result) {
315 return nullptr;
316 }
jvanverth7ec92412016-07-06 09:24:57 -0700317 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500318 SkASSERT(result->unique());
319 SkDEBUGCODE(
320 for (const GrVkCommandPool* pool : fActiveCommandPools) {
321 SkASSERT(pool != result);
322 }
323 for (const GrVkCommandPool* pool : fAvailableCommandPools) {
324 SkASSERT(pool != result);
325 }
Ben Wagner1c0cacf2019-01-14 12:57:36 -0500326 )
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500327 fActiveCommandPools.push_back(result);
328 result->ref();
329 return result;
Greg Daniel164a9f02016-02-22 09:56:40 -0500330}
331
332void GrVkResourceProvider::checkCommandBuffers() {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500333 for (int i = fActiveCommandPools.count() - 1; i >= 0; --i) {
334 GrVkCommandPool* pool = fActiveCommandPools[i];
335 if (!pool->isOpen()) {
336 GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
337 if (buffer->finished(fGpu)) {
338 fActiveCommandPools.removeShuffle(i);
339 this->backgroundReset(pool);
340 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500341 }
342 }
343}
344
Greg Daniela3aa75a2019-04-12 14:24:55 -0400345void GrVkResourceProvider::addFinishedProcToActiveCommandBuffers(
346 GrGpuFinishedProc finishedProc, GrGpuFinishedContext finishedContext) {
347 sk_sp<GrRefCntedCallback> procRef(new GrRefCntedCallback(finishedProc, finishedContext));
348 for (int i = 0; i < fActiveCommandPools.count(); ++i) {
349 GrVkCommandPool* pool = fActiveCommandPools[i];
350 if (!pool->isOpen()) {
351 GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer();
352 buffer->addFinishedProc(procRef);
353 }
354 }
355}
356
jvanverth4c6e47a2016-07-22 10:34:52 -0700357const GrVkResource* GrVkResourceProvider::findOrCreateStandardUniformBufferResource() {
358 const GrVkResource* resource = nullptr;
359 int count = fAvailableUniformBufferResources.count();
360 if (count > 0) {
361 resource = fAvailableUniformBufferResources[count - 1];
362 fAvailableUniformBufferResources.removeShuffle(count - 1);
363 } else {
364 resource = GrVkUniformBuffer::CreateResource(fGpu, GrVkUniformBuffer::kStandardSize);
365 }
366 return resource;
367}
368
void GrVkResourceProvider::recycleStandardUniformBufferResource(const GrVkResource* resource) {
    // Takes over the caller's ref: resources parked here are either handed back
    // out by findOrCreateStandardUniformBufferResource() or unref'ed in
    // destroyResources()/abandonResources().
    fAvailableUniformBufferResources.push_back(resource);
}
372
void GrVkResourceProvider::destroyResources(bool deviceLost) {
    // Tears down everything this provider owns, making real Vulkan destroy calls.
    // NOTE(review): 'deviceLost' is unused in this body — confirm whether a lost
    // device should skip any of the Vk* calls below.
    // First wait out any background command-pool resets still in flight.
    SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].releaseResources(fGpu);
    }
    fRenderPassArray.reset();

    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        fExternalRenderPasses[i]->unref(fGpu);
    }
    fExternalRenderPasses.reset();

    // Iterate through all store GrVkSamplers and unref them before resetting the hash.
    for (decltype(fSamplers)::Iter iter(&fSamplers); !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fSamplers.reset();

    for (decltype(fYcbcrConversions)::Iter iter(&fYcbcrConversions); !iter.done(); ++iter) {
        (*iter).unref(fGpu);
    }
    fYcbcrConversions.reset();

    // Pipeline states must go before the descriptor-set managers (see below).
    fPipelineStateCache->release();

    // Destroy the VkPipelineCache and null the handle so the destructor's
    // assert passes.
    GR_VK_CALL(fGpu->vkInterface(), DestroyPipelineCache(fGpu->device(), fPipelineCache, nullptr));
    fPipelineCache = VK_NULL_HANDLE;

    for (GrVkCommandPool* pool : fActiveCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fActiveCommandPools.reset();

    for (GrVkCommandPool* pool : fAvailableCommandPools) {
        SkASSERT(pool->unique());
        pool->unref(fGpu);
    }
    fAvailableCommandPools.reset();

    // We must release/destroy all command buffers and pipeline states before releasing the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->release(fGpu);
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unref(fGpu);
    }
    fAvailableUniformBufferResources.reset();
}
432
void GrVkResourceProvider::abandonResources() {
    // Like destroyResources(), but drops everything without making Vulkan calls
    // (the device/context is assumed gone). Wait out background resets first.
    SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
    if (taskGroup) {
        taskGroup->wait();
    }

    // Abandon all command pools
    for (int i = 0; i < fActiveCommandPools.count(); ++i) {
        SkASSERT(fActiveCommandPools[i]->unique());
        fActiveCommandPools[i]->unrefAndAbandon();
    }
    fActiveCommandPools.reset();
    for (int i = 0; i < fAvailableCommandPools.count(); ++i) {
        SkASSERT(fAvailableCommandPools[i]->unique());
        fAvailableCommandPools[i]->unrefAndAbandon();
    }
    fAvailableCommandPools.reset();

    // loop over all render pass sets to make sure we destroy all the internal VkRenderPasses
    for (int i = 0; i < fRenderPassArray.count(); ++i) {
        fRenderPassArray[i].abandonResources();
    }
    fRenderPassArray.reset();

    for (int i = 0; i < fExternalRenderPasses.count(); ++i) {
        fExternalRenderPasses[i]->unrefAndAbandon();
    }
    fExternalRenderPasses.reset();

    // Iterate through all store GrVkSamplers and unrefAndAbandon them before resetting the hash.
    SkTDynamicHash<GrVkSampler, GrVkSampler::Key>::Iter iter(&fSamplers);
    for (; !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fSamplers.reset();

    // (This loop-scoped 'iter' intentionally shadows the one above.)
    for (decltype(fYcbcrConversions)::Iter iter(&fYcbcrConversions); !iter.done(); ++iter) {
        (*iter).unrefAndAbandon();
    }
    fYcbcrConversions.reset();

    fPipelineStateCache->abandon();

    // Drop the handle without DestroyPipelineCache: no Vulkan calls on abandon.
    fPipelineCache = VK_NULL_HANDLE;

    // We must abandon all command buffers and pipeline states before abandoning the
    // GrVkDescriptorSetManagers
    for (int i = 0; i < fDescriptorSetManagers.count(); ++i) {
        fDescriptorSetManagers[i]->abandon();
    }
    fDescriptorSetManagers.reset();

    // release our uniform buffers
    for (int i = 0; i < fAvailableUniformBufferResources.count(); ++i) {
        SkASSERT(fAvailableUniformBufferResources[i]->unique());
        fAvailableUniformBufferResources[i]->unrefAndAbandon();
    }
    fAvailableUniformBufferResources.reset();
}
egdanield62e28b2016-06-07 08:43:30 -0700492
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500493void GrVkResourceProvider::backgroundReset(GrVkCommandPool* pool) {
Brian Salomone39526b2019-06-24 16:35:53 -0400494 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500495 SkASSERT(pool->unique());
496 pool->releaseResources(fGpu);
Robert Phillips9da87e02019-02-04 13:26:26 -0500497 SkTaskGroup* taskGroup = fGpu->getContext()->priv().getTaskGroup();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500498 if (taskGroup) {
499 taskGroup->add([this, pool]() {
500 this->reset(pool);
501 });
502 } else {
503 this->reset(pool);
504 }
505}
506
507void GrVkResourceProvider::reset(GrVkCommandPool* pool) {
Brian Salomone39526b2019-06-24 16:35:53 -0400508 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500509 SkASSERT(pool->unique());
510 pool->reset(fGpu);
511 std::unique_lock<std::recursive_mutex> providerLock(fBackgroundMutex);
512 fAvailableCommandPools.push_back(pool);
513}
514
Greg Daniela870b462019-01-08 15:49:46 -0500515void GrVkResourceProvider::storePipelineCacheData() {
516 size_t dataSize = 0;
517 VkResult result = GR_VK_CALL(fGpu->vkInterface(), GetPipelineCacheData(fGpu->device(),
518 this->pipelineCache(),
519 &dataSize, nullptr));
520 SkASSERT(result == VK_SUCCESS);
521
522 std::unique_ptr<uint8_t[]> data(new uint8_t[dataSize]);
523
524 result = GR_VK_CALL(fGpu->vkInterface(), GetPipelineCacheData(fGpu->device(),
525 this->pipelineCache(),
526 &dataSize,
527 (void*)data.get()));
528 SkASSERT(result == VK_SUCCESS);
529
530 uint32_t key = GrVkGpu::kPipelineCache_PersistentCacheKeyType;
531 sk_sp<SkData> keyData = SkData::MakeWithoutCopy(&key, sizeof(uint32_t));
532
Robert Phillips9da87e02019-02-04 13:26:26 -0500533 fGpu->getContext()->priv().getPersistentCache()->store(
Greg Daniela870b462019-01-08 15:49:46 -0500534 *keyData, *SkData::MakeWithoutCopy(data.get(), dataSize));
535}
536
egdanield62e28b2016-06-07 08:43:30 -0700537////////////////////////////////////////////////////////////////////////////////
538
GrVkResourceProvider::CompatibleRenderPassSet::CompatibleRenderPassSet(
        GrVkGpu* gpu, const GrVkRenderTarget& target) : fLastReturnedIndex(0) {
    // Every set owns at least one pass: a "simple" load/store render pass that
    // defines the compatibility class for the whole set (see isCompatible()).
    fRenderPasses.emplace_back(new GrVkRenderPass());
    fRenderPasses[0]->initSimple(gpu, target);
}
544
bool GrVkResourceProvider::CompatibleRenderPassSet::isCompatible(
        const GrVkRenderTarget& target) const {
    // The first GrVkRenderPass always exists since the constructor creates the
    // basic load/store render pass; all passes in a set are mutually compatible,
    // so checking the first one suffices.
    SkASSERT(fRenderPasses[0]);
    return fRenderPasses[0]->isCompatible(target);
}
552
egdaniel2feb0932016-06-08 06:48:09 -0700553GrVkRenderPass* GrVkResourceProvider::CompatibleRenderPassSet::getRenderPass(
Greg Daniele643da62019-11-05 12:36:42 -0500554 GrVkGpu* gpu,
555 const GrVkRenderPass::LoadStoreOps& colorOps,
556 const GrVkRenderPass::LoadStoreOps& stencilOps) {
egdaniel2feb0932016-06-08 06:48:09 -0700557 for (int i = 0; i < fRenderPasses.count(); ++i) {
558 int idx = (i + fLastReturnedIndex) % fRenderPasses.count();
egdanielce3bfb12016-08-26 11:05:13 -0700559 if (fRenderPasses[idx]->equalLoadStoreOps(colorOps, stencilOps)) {
egdaniel2feb0932016-06-08 06:48:09 -0700560 fLastReturnedIndex = idx;
561 return fRenderPasses[idx];
562 }
563 }
egdaniel9cb63402016-06-23 08:37:05 -0700564 GrVkRenderPass* renderPass = fRenderPasses.emplace_back(new GrVkRenderPass());
egdanielce3bfb12016-08-26 11:05:13 -0700565 renderPass->init(gpu, *this->getCompatibleRenderPass(), colorOps, stencilOps);
egdaniel2feb0932016-06-08 06:48:09 -0700566 fLastReturnedIndex = fRenderPasses.count() - 1;
567 return renderPass;
568}
569
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500570void GrVkResourceProvider::CompatibleRenderPassSet::releaseResources(GrVkGpu* gpu) {
egdanield62e28b2016-06-07 08:43:30 -0700571 for (int i = 0; i < fRenderPasses.count(); ++i) {
572 if (fRenderPasses[i]) {
573 fRenderPasses[i]->unref(gpu);
574 fRenderPasses[i] = nullptr;
575 }
576 }
577}
578
579void GrVkResourceProvider::CompatibleRenderPassSet::abandonResources() {
580 for (int i = 0; i < fRenderPasses.count(); ++i) {
581 if (fRenderPasses[i]) {
582 fRenderPasses[i]->unrefAndAbandon();
583 fRenderPasses[i] = nullptr;
584 }
585 }
586}