/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkBuffer.h"
#include "GrVkGpu.h"
#include "GrVkMemory.h"
#include "GrVkTransferBuffer.h"
#include "GrVkUtil.h"

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

#ifdef SK_DEBUG
#define VALIDATE() this->validate()
#else
#define VALIDATE() do {} while(false)
#endif

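// Creates the VkBuffer and its backing memory for the requested buffer type and
// returns a ref-counted Resource that owns both, or nullptr if either step fails.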
const GrVkBuffer::Resource* GrVkBuffer::Create(const GrVkGpu* gpu, const Desc& desc) {
    VkBuffer buffer;
    GrVkAlloc alloc;

    // create the buffer object
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = 0;
    bufInfo.size = desc.fSizeInBytes;
    switch (desc.fType) {
        case kVertex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
            break;
        case kIndex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
            break;
        case kUniform_Type:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            break;
        case kCopyRead_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            break;
        case kCopyWrite_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            break;
        case kTexel_Type:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
    }
    if (!desc.fDynamic) {
        bufInfo.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    }

    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;

    VkResult err;
    err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
    if (err) {
        return nullptr;
    }

    if (!GrVkMemory::AllocAndBindBufferMemory(gpu,
                                              buffer,
                                              desc.fType,
                                              desc.fDynamic,
                                              &alloc)) {
        return nullptr;
    }

    const GrVkBuffer::Resource* resource = new GrVkBuffer::Resource(buffer, alloc, desc.fType);
    if (!resource) {
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        GrVkMemory::FreeBufferMemory(gpu, desc.fType, alloc);
        return nullptr;
    }

    return resource;
}

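// Records a buffer memory barrier on the gpu that covers the entire buffer.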
void GrVkBuffer::addMemoryBarrier(const GrVkGpu* gpu,
                                  VkAccessFlags srcAccessMask,
                                  VkAccessFlags dstAccessMask,
                                  VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
                                  bool byRegion) const {
    VkBufferMemoryBarrier bufferMemoryBarrier = {
        VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // sType
        nullptr,                                 // pNext
        srcAccessMask,                           // srcAccessMask
        dstAccessMask,                           // dstAccessMask
        VK_QUEUE_FAMILY_IGNORED,                 // srcQueueFamilyIndex
        VK_QUEUE_FAMILY_IGNORED,                 // dstQueueFamilyIndex
        this->buffer(),                          // buffer
        0,                                       // offset
        fDesc.fSizeInBytes,                      // size
    };

    // TODO: restrict to area of buffer we're interested in
    gpu->addBufferMemoryBarrier(this->resource(), srcStageMask, dstStageMask, byRegion,
                                &bufferMemoryBarrier);
}

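// Called once the GPU is done with the resource: destroys the VkBuffer and returns
// its memory to the allocator.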
void GrVkBuffer::Resource::freeGPUData(GrVkGpu* gpu) const {
    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory);
    VK_CALL(gpu, DestroyBuffer(gpu->device(), fBuffer, nullptr));
    GrVkMemory::FreeBufferMemory(gpu, fType, fAlloc);
}

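// Drops this buffer's reference to the backing Resource (recycling it for reuse) and
// frees the CPU-side staging copy kept for non-dynamic buffers.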
void GrVkBuffer::vkRelease(const GrVkGpu* gpu) {
    VALIDATE();
    fResource->recycle(const_cast<GrVkGpu*>(gpu));
    fResource = nullptr;
    if (!fDesc.fDynamic) {
        delete[] (unsigned char*)fMapPtr;
    }
    fMapPtr = nullptr;
    VALIDATE();
}

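// Same cleanup as vkRelease, but the Resource is unreffed and abandoned rather than
// recycled, so no Vulkan objects are destroyed here.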
void GrVkBuffer::vkAbandon() {
    fResource->unrefAndAbandon();
    fResource = nullptr;
    if (!fDesc.fDynamic) {
        delete[] (unsigned char*)fMapPtr;
    }
    fMapPtr = nullptr;
    VALIDATE();
}

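// Access mask with which a static buffer of the given type is read after an upload.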
VkAccessFlags buffer_type_to_access_flags(GrVkBuffer::Type type) {
    switch (type) {
        case GrVkBuffer::kIndex_Type:
            return VK_ACCESS_INDEX_READ_BIT;
        case GrVkBuffer::kVertex_Type:
            return VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
        default:
            // This helper is only called for static buffers so we should only ever see index or
            // vertex buffer types
            SkASSERT(false);
            return 0;
    }
}

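// Makes fMapPtr writable. Dynamic buffers are mapped directly (recycling the Resource
// if the GPU still references it); non-dynamic buffers get a CPU-side scratch buffer
// that internalUnmap later uploads.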
void GrVkBuffer::internalMap(GrVkGpu* gpu, size_t size, bool* createdNewBuffer) {
    VALIDATE();
    SkASSERT(!this->vkIsMapped());

    if (!fResource->unique()) {
        if (fDesc.fDynamic) {
            // in use by the command buffer, so we need to create a new one
            fResource->recycle(gpu);
            fResource = this->createResource(gpu, fDesc);
            if (createdNewBuffer) {
                *createdNewBuffer = true;
            }
        } else {
            SkASSERT(fMapPtr);
            this->addMemoryBarrier(gpu,
                                   buffer_type_to_access_flags(fDesc.fType),
                                   VK_ACCESS_TRANSFER_WRITE_BIT,
                                   VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   false);
        }
    }

    if (fDesc.fDynamic) {
        const GrVkAlloc& alloc = this->alloc();
        SkASSERT(alloc.fSize > 0);
        SkASSERT(alloc.fSize >= size);
        SkASSERT(0 == fOffset);

        fMapPtr = GrVkMemory::MapAlloc(gpu, alloc);
    } else {
        if (!fMapPtr) {
            fMapPtr = new unsigned char[this->size()];
        }
    }

    VALIDATE();
}

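// Publishes whatever was written through fMapPtr. Dynamic buffers are flushed and
// unmapped; non-dynamic buffers are uploaded with vkCmdUpdateBuffer (small, aligned
// sizes) or through a transfer buffer, followed by a barrier back to the read stage.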
void GrVkBuffer::internalUnmap(GrVkGpu* gpu, size_t size) {
    VALIDATE();
    SkASSERT(this->vkIsMapped());

    if (fDesc.fDynamic) {
        const GrVkAlloc& alloc = this->alloc();
        SkASSERT(alloc.fSize > 0);
        SkASSERT(alloc.fSize >= size);
        // We currently don't use fOffset
        SkASSERT(0 == fOffset);

        GrVkMemory::FlushMappedAlloc(gpu, alloc, 0, size);
        GrVkMemory::UnmapAlloc(gpu, alloc);
        fMapPtr = nullptr;
    } else {
        // vkCmdUpdateBuffer requires size <= 64k and 4-byte alignment.
        // https://bugs.chromium.org/p/skia/issues/detail?id=7488
        if (size <= 65536 && 0 == (size & 0x3)) {
            gpu->updateBuffer(this, fMapPtr, this->offset(), size);
        } else {
            sk_sp<GrVkTransferBuffer> transferBuffer =
                    GrVkTransferBuffer::Make(gpu, size, GrVkBuffer::kCopyRead_Type);
            if (!transferBuffer) {
                return;
            }

            char* buffer = (char*)transferBuffer->map();
            memcpy(buffer, fMapPtr, size);
            transferBuffer->unmap();

            gpu->copyBuffer(transferBuffer.get(), this, 0, this->offset(), size);
        }
        this->addMemoryBarrier(gpu,
                               VK_ACCESS_TRANSFER_WRITE_BIT,
                               buffer_type_to_access_flags(fDesc.fType),
                               VK_PIPELINE_STAGE_TRANSFER_BIT,
                               VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                               false);
    }
}

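// A buffer is considered mapped while fMapPtr is non-null.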
bool GrVkBuffer::vkIsMapped() const {
    VALIDATE();
    return SkToBool(fMapPtr);
}

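// Copies srcSizeInBytes bytes from src into the buffer via internalMap/internalUnmap.
// Fails if the data does not fit or the buffer could not be mapped.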
bool GrVkBuffer::vkUpdateData(GrVkGpu* gpu, const void* src, size_t srcSizeInBytes,
                              bool* createdNewBuffer) {
    if (srcSizeInBytes > fDesc.fSizeInBytes) {
        return false;
    }

    this->internalMap(gpu, srcSizeInBytes, createdNewBuffer);
    if (!fMapPtr) {
        return false;
    }

    memcpy(fMapPtr, src, srcSizeInBytes);

    this->internalUnmap(gpu, srcSizeInBytes);

    return true;
}

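// Debug-only sanity check: if the buffer still holds a Resource, its type must be one
// of the known buffer types.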
void GrVkBuffer::validate() const {
    SkASSERT(!fResource || kVertex_Type == fDesc.fType || kIndex_Type == fDesc.fType
             || kTexel_Type == fDesc.fType || kCopyRead_Type == fDesc.fType
             || kCopyWrite_Type == fDesc.fType || kUniform_Type == fDesc.fType);
}