/*
 * Copyright 2021 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkBuffer.h"

#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkMemory.h"
#include "src/gpu/vk/GrVkUniformHandler.h"
#include "src/gpu/vk/GrVkUtil.h"

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

GrVkBuffer::GrVkBuffer(GrVkGpu* gpu,
                       size_t sizeInBytes,
                       GrGpuBufferType bufferType,
                       GrAccessPattern accessPattern,
                       VkBuffer buffer,
                       const GrVkAlloc& alloc,
                       const GrVkDescriptorSet* uniformDescriptorSet)
        : GrGpuBuffer(gpu, sizeInBytes, bufferType, accessPattern)
        , fBuffer(buffer)
        , fAlloc(alloc)
        , fUniformDescriptorSet(uniformDescriptorSet) {
    // We always require dynamic buffers to be mappable
    SkASSERT(accessPattern != kDynamic_GrAccessPattern || this->isVkMappable());
    SkASSERT(bufferType != GrGpuBufferType::kUniform || uniformDescriptorSet);
    this->registerWithCache(SkBudgeted::kYes);
}

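// Allocates a uniform descriptor set from the gpu's resource provider and writes a single
// VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER binding that covers the entire buffer. Returns nullptr if
// no descriptor set could be obtained.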
static const GrVkDescriptorSet* make_uniform_desc_set(GrVkGpu* gpu, VkBuffer buffer, size_t size) {
    const GrVkDescriptorSet* descriptorSet = gpu->resourceProvider().getUniformDescriptorSet();
    if (!descriptorSet) {
        return nullptr;
    }

    VkDescriptorBufferInfo bufferInfo;
    memset(&bufferInfo, 0, sizeof(VkDescriptorBufferInfo));
    bufferInfo.buffer = buffer;
    bufferInfo.offset = 0;
    bufferInfo.range = size;

    VkWriteDescriptorSet descriptorWrite;
    memset(&descriptorWrite, 0, sizeof(VkWriteDescriptorSet));
    descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptorWrite.pNext = nullptr;
    descriptorWrite.dstSet = *descriptorSet->descriptorSet();
    descriptorWrite.dstBinding = GrVkUniformHandler::kUniformBinding;
    descriptorWrite.dstArrayElement = 0;
    descriptorWrite.descriptorCount = 1;
    descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptorWrite.pImageInfo = nullptr;
    descriptorWrite.pBufferInfo = &bufferInfo;
    descriptorWrite.pTexelBufferView = nullptr;

    GR_VK_CALL(gpu->vkInterface(),
               UpdateDescriptorSets(gpu->device(), 1, &descriptorWrite, 0, nullptr));
    return descriptorSet;
}

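// Illustrative use only -- buffers are normally created through GrVkGpu/GrResourceProvider rather
// than by calling Make() directly, and the names below are placeholders:
//     sk_sp<GrVkBuffer> buffer = GrVkBuffer::Make(vkGpu, /*size=*/1024,
//                                                 GrGpuBufferType::kVertex,
//                                                 kStatic_GrAccessPattern);
//     if (buffer) {
//         buffer->updateData(srcData, 1024);  // maps directly or stages a GPU-side copy
//     }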
sk_sp<GrVkBuffer> GrVkBuffer::Make(GrVkGpu* gpu,
                                   size_t size,
                                   GrGpuBufferType bufferType,
                                   GrAccessPattern accessPattern) {
    VkBuffer buffer;
    GrVkAlloc alloc;

    // The only time we don't require mappable buffers is when we have a static access pattern and
    // we're on a device where gpu-only memory has faster reads on the gpu than memory that is also
    // mappable on the cpu. Protected memory always uses mappable buffers.
    bool requiresMappable = gpu->protectedContext() ||
                            accessPattern == kDynamic_GrAccessPattern ||
                            accessPattern == kStream_GrAccessPattern ||
                            !gpu->vkCaps().gpuOnlyBuffersMorePerformant();

    using BufferUsage = GrVkMemoryAllocator::BufferUsage;
    BufferUsage allocUsage;

    // create the buffer object
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = 0;
    bufInfo.size = size;
    switch (bufferType) {
        case GrGpuBufferType::kVertex:
            bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
            allocUsage = requiresMappable ? BufferUsage::kCpuWritesGpuReads : BufferUsage::kGpuOnly;
            break;
        case GrGpuBufferType::kIndex:
            bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
            allocUsage = requiresMappable ? BufferUsage::kCpuWritesGpuReads : BufferUsage::kGpuOnly;
            break;
        case GrGpuBufferType::kDrawIndirect:
            bufInfo.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
            allocUsage = requiresMappable ? BufferUsage::kCpuWritesGpuReads : BufferUsage::kGpuOnly;
            break;
        case GrGpuBufferType::kUniform:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            allocUsage = BufferUsage::kCpuWritesGpuReads;
            break;
        case GrGpuBufferType::kXferCpuToGpu:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocUsage = BufferUsage::kTransfersFromCpuToGpu;
            break;
        case GrGpuBufferType::kXferGpuToCpu:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            allocUsage = BufferUsage::kTransfersFromGpuToCpu;
            break;
    }
    // We may not always get a mappable buffer for non-dynamic access buffers. Thus we set the
    // transfer dst usage bit in case we need to do a copy to write data.
    // TODO: It doesn't really hurt setting this extra usage flag, but maybe we can narrow the
    // set of buffers we add it to beyond just the non-dynamic ones.
    if (!requiresMappable) {
        bufInfo.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    }

    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;

    VkResult err;
    err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
    if (err) {
        return nullptr;
    }

    if (!GrVkMemory::AllocAndBindBufferMemory(gpu, buffer, allocUsage, &alloc)) {
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        return nullptr;
    }

    // If this is a uniform buffer we must set up a descriptor set.
    const GrVkDescriptorSet* uniformDescSet = nullptr;
    if (bufferType == GrGpuBufferType::kUniform) {
        uniformDescSet = make_uniform_desc_set(gpu, buffer, size);
        if (!uniformDescSet) {
            VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
            GrVkMemory::FreeBufferMemory(gpu, alloc);
            return nullptr;
        }
    }

    return sk_sp<GrVkBuffer>(new GrVkBuffer(gpu, size, bufferType, accessPattern, buffer, alloc,
                                            uniformDescSet));
}

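// vkMap/vkUnmap wrap GrVkMemory's map and unmap calls and take care of any cache maintenance
// needed for the mapped allocation: a GPU-to-CPU transfer buffer is invalidated after mapping,
// and written ranges are flushed before unmapping.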
void GrVkBuffer::vkMap(size_t size) {
    SkASSERT(!fMapPtr);
    if (this->isVkMappable()) {
        // Not every buffer will use command buffer usage refs; instead the command buffer may just
        // hold normal refs. Systems higher up in Ganesh should be making sure not to reuse a
        // buffer that currently has a ref held by something else. However, we do need to make sure
        // there isn't a buffer with just a command buffer usage that is trying to be mapped.
        SkASSERT(this->internalHasNoCommandBufferUsages());
        SkASSERT(fAlloc.fSize > 0);
        SkASSERT(fAlloc.fSize >= size);
        fMapPtr = GrVkMemory::MapAlloc(this->getVkGpu(), fAlloc);
        if (fMapPtr && this->intendedType() == GrGpuBufferType::kXferGpuToCpu) {
            GrVkMemory::InvalidateMappedAlloc(this->getVkGpu(), fAlloc, 0, size);
        }
    }
}

void GrVkBuffer::vkUnmap(size_t size) {
    SkASSERT(fMapPtr && this->isVkMappable());

    SkASSERT(fAlloc.fSize > 0);
    SkASSERT(fAlloc.fSize >= size);

    GrVkGpu* gpu = this->getVkGpu();
    GrVkMemory::FlushMappedAlloc(gpu, fAlloc, 0, size);
    GrVkMemory::UnmapAlloc(gpu, fAlloc);
}

static VkAccessFlags buffer_type_to_access_flags(GrGpuBufferType type) {
    switch (type) {
        case GrGpuBufferType::kIndex:
            return VK_ACCESS_INDEX_READ_BIT;
        case GrGpuBufferType::kVertex:
            return VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
        default:
            // This helper is only called for static buffers, so we should only ever see index or
            // vertex buffer types.
            SkUNREACHABLE;
    }
}

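// Writes CPU data into a buffer that is not host-mappable. Small, 4-byte-aligned updates go
// through vkCmdUpdateBuffer; everything else is staged through a transfer buffer and copied on
// the GPU. A barrier is then recorded so the write is visible to later vertex/index reads.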
void GrVkBuffer::copyCpuDataToGpuBuffer(const void* src, size_t size) {
    SkASSERT(src);

    GrVkGpu* gpu = this->getVkGpu();

    // We should never call this method in protected contexts.
    SkASSERT(!gpu->protectedContext());

    // The Vulkan API restricts the use of vkCmdUpdateBuffer to updates that are less than or equal
    // to 65536 bytes and a size that is 4 byte aligned.
    if ((size <= 65536) && (0 == (size & 0x3)) && !gpu->vkCaps().avoidUpdateBuffers()) {
        gpu->updateBuffer(sk_ref_sp(this), src, /*offset=*/0, size);
    } else {
        GrResourceProvider* resourceProvider = gpu->getContext()->priv().resourceProvider();
        sk_sp<GrGpuBuffer> transferBuffer = resourceProvider->createBuffer(
                size, GrGpuBufferType::kXferCpuToGpu, kDynamic_GrAccessPattern, src);
        if (!transferBuffer) {
            return;
        }

        gpu->copyBuffer(std::move(transferBuffer), sk_ref_sp(this), /*srcOffset=*/0,
                        /*dstOffset=*/0, size);
    }

    this->addMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT,
                           buffer_type_to_access_flags(this->intendedType()),
                           VK_PIPELINE_STAGE_TRANSFER_BIT,
                           VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                           /*byRegion=*/false);
}

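// Records a VkBufferMemoryBarrier covering the entire buffer range on this buffer's GrVkGpu.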
void GrVkBuffer::addMemoryBarrier(VkAccessFlags srcAccessMask,
                                  VkAccessFlags dstAccessMask,
                                  VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
                                  bool byRegion) const {
    VkBufferMemoryBarrier bufferMemoryBarrier = {
            VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,  // sType
            nullptr,                                  // pNext
            srcAccessMask,                            // srcAccessMask
            dstAccessMask,                            // dstAccessMask
            VK_QUEUE_FAMILY_IGNORED,                  // srcQueueFamilyIndex
            VK_QUEUE_FAMILY_IGNORED,                  // dstQueueFamilyIndex
            fBuffer,                                  // buffer
            0,                                        // offset
            this->size(),                             // size
    };

    // TODO: restrict to area of buffer we're interested in
    this->getVkGpu()->addBufferMemoryBarrier(srcStageMask, dstStageMask, byRegion,
                                             &bufferMemoryBarrier);
}

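// Frees all Vulkan resources owned by this object: any outstanding mapping is unmapped, the
// uniform descriptor set (if any) is recycled, and the VkBuffer and its backing memory are
// destroyed.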
void GrVkBuffer::vkRelease() {
    if (this->wasDestroyed()) {
        return;
    }

    if (fMapPtr) {
        this->vkUnmap(this->size());
        fMapPtr = nullptr;
    }

    if (fUniformDescriptorSet) {
        fUniformDescriptorSet->recycle();
        fUniformDescriptorSet = nullptr;
    }

    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory && fAlloc.fBackendMemory);
    VK_CALL(this->getVkGpu(), DestroyBuffer(this->getVkGpu()->device(), fBuffer, nullptr));
    fBuffer = VK_NULL_HANDLE;

    GrVkMemory::FreeBufferMemory(this->getVkGpu(), fAlloc);
    fAlloc.fMemory = VK_NULL_HANDLE;
    fAlloc.fBackendMemory = 0;
}

void GrVkBuffer::onRelease() {
    this->vkRelease();
    this->GrGpuBuffer::onRelease();
}

void GrVkBuffer::onAbandon() {
    this->vkRelease();
    this->GrGpuBuffer::onAbandon();
}

void GrVkBuffer::onMap() {
    if (!this->wasDestroyed()) {
        this->vkMap(this->size());
    }
}

void GrVkBuffer::onUnmap() {
    if (!this->wasDestroyed()) {
        this->vkUnmap(this->size());
    }
}

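// Called via GrGpuBuffer::updateData(). Mappable buffers are written directly through a map;
// otherwise the data is uploaded with copyCpuDataToGpuBuffer().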
bool GrVkBuffer::onUpdateData(const void* src, size_t srcSizeInBytes) {
    if (this->wasDestroyed()) {
        return false;
    }

    if (srcSizeInBytes > this->size()) {
        return false;
    }

    if (this->isVkMappable()) {
        this->vkMap(srcSizeInBytes);
        if (!fMapPtr) {
            return false;
        }
        memcpy(fMapPtr, src, srcSizeInBytes);
        this->vkUnmap(srcSizeInBytes);
        fMapPtr = nullptr;
    } else {
        this->copyCpuDataToGpuBuffer(src, srcSizeInBytes);
    }
    return true;
}

GrVkGpu* GrVkBuffer::getVkGpu() const {
    SkASSERT(!this->wasDestroyed());
    return static_cast<GrVkGpu*>(this->getGpu());
}

const VkDescriptorSet* GrVkBuffer::uniformDescriptorSet() const {
    SkASSERT(fUniformDescriptorSet);
    return fUniformDescriptorSet->descriptorSet();
}