/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkBuffer.h"
#include "GrVkGpu.h"
#include "GrVkMemory.h"
#include "GrVkTransferBuffer.h"
#include "GrVkUtil.h"

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

#ifdef SK_DEBUG
#define VALIDATE() this->validate()
#else
#define VALIDATE() do {} while(false)
#endif

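// Creates the backing VkBuffer and its device memory. The usage flags come from the buffer type;
// static (non-dynamic) buffers also get TRANSFER_DST so they can be updated with copies. On
// success the VkBuffer and its GrVkAlloc are wrapped in a ref-counted Resource; on any failure
// nullptr is returned.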
const GrVkBuffer::Resource* GrVkBuffer::Create(const GrVkGpu* gpu, const Desc& desc) {
    VkBuffer buffer;
    GrVkAlloc alloc;

    // create the buffer object
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = 0;
    bufInfo.size = desc.fSizeInBytes;
    switch (desc.fType) {
        case kVertex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
            break;
        case kIndex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
            break;
        case kUniform_Type:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            break;
        case kCopyRead_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            break;
        case kCopyWrite_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            break;
        case kTexel_Type:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
    }
    if (!desc.fDynamic) {
        bufInfo.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    }

    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;

    VkResult err;
    err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
    if (err) {
        return nullptr;
    }

    if (!GrVkMemory::AllocAndBindBufferMemory(gpu,
                                              buffer,
                                              desc.fType,
                                              desc.fDynamic,
                                              &alloc)) {
        // Memory allocation failed; don't leak the VkBuffer we just created.
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        return nullptr;
    }

    const GrVkBuffer::Resource* resource = new GrVkBuffer::Resource(buffer, alloc, desc.fType);
    if (!resource) {
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        GrVkMemory::FreeBufferMemory(gpu, desc.fType, alloc);
        return nullptr;
    }

    return resource;
}

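// Records a VkBufferMemoryBarrier covering the entire buffer. The caller supplies the access
// masks and pipeline stages; the barrier is handed to the GrVkGpu, which adds it to the current
// command buffer.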
void GrVkBuffer::addMemoryBarrier(const GrVkGpu* gpu,
                                  VkAccessFlags srcAccessMask,
                                  VkAccessFlags dstAccessMask,
                                  VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
                                  bool byRegion) const {
    VkBufferMemoryBarrier bufferMemoryBarrier = {
        VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // sType
        nullptr,                                 // pNext
        srcAccessMask,                           // srcAccessMask
        dstAccessMask,                           // dstAccessMask
        VK_QUEUE_FAMILY_IGNORED,                 // srcQueueFamilyIndex
        VK_QUEUE_FAMILY_IGNORED,                 // dstQueueFamilyIndex
        this->buffer(),                          // buffer
        0,                                       // offset
        fDesc.fSizeInBytes,                      // size
    };

    // TODO: restrict to area of buffer we're interested in
    gpu->addBufferMemoryBarrier(srcStageMask, dstStageMask, byRegion, &bufferMemoryBarrier);
}

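// Called when the last reference to the Resource goes away: destroys the VkBuffer and returns its
// backing memory to GrVkMemory.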
void GrVkBuffer::Resource::freeGPUData(GrVkGpu* gpu) const {
    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory);
    VK_CALL(gpu, DestroyBuffer(gpu->device(), fBuffer, nullptr));
    GrVkMemory::FreeBufferMemory(gpu, fType, fAlloc);
}

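// vkRelease() drops the Resource while the device is still valid, so its Vulkan objects can be
// freed or recycled normally. vkAbandon() is for when the context is abandoned and device objects
// can no longer be touched, so the Resource is unreffed without freeing GPU data. Both also drop
// the CPU-side scratch array kept for static buffers.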
void GrVkBuffer::vkRelease(const GrVkGpu* gpu) {
    VALIDATE();
    fResource->recycle(const_cast<GrVkGpu*>(gpu));
    fResource = nullptr;
    if (!fDesc.fDynamic) {
        delete[] (unsigned char*)fMapPtr;
    }
    fMapPtr = nullptr;
    VALIDATE();
}

void GrVkBuffer::vkAbandon() {
    fResource->unrefAndAbandon();
    fResource = nullptr;
    if (!fDesc.fDynamic) {
        delete[] (unsigned char*)fMapPtr;
    }
    fMapPtr = nullptr;
    VALIDATE();
}

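// Maps a static buffer's type to the access flag the GPU uses when reading it; used for the
// barriers recorded around transfer updates.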
VkAccessFlags buffer_type_to_access_flags(GrVkBuffer::Type type) {
    switch (type) {
        case GrVkBuffer::kIndex_Type:
            return VK_ACCESS_INDEX_READ_BIT;
        case GrVkBuffer::kVertex_Type:
            return VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
        default:
            // This helper is only called for static buffers, so we should only ever see index or
            // vertex buffer types.
            SkASSERT(false);
            return 0;
    }
}

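// Prepares the buffer for CPU writes. If the current Resource is still referenced by an in-flight
// command buffer, a dynamic buffer switches to a fresh Resource (reporting *createdNewBuffer),
// while a static buffer records a barrier from its read access to TRANSFER_WRITE. Dynamic buffers
// then map their device memory directly; static buffers write into a heap-allocated scratch array
// that is uploaded in internalUnmap().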
void GrVkBuffer::internalMap(GrVkGpu* gpu, size_t size, bool* createdNewBuffer) {
    VALIDATE();
    SkASSERT(!this->vkIsMapped());

    if (!fResource->unique()) {
        if (fDesc.fDynamic) {
            // in use by the command buffer, so we need to create a new one
            fResource->recycle(gpu);
            fResource = this->createResource(gpu, fDesc);
            if (createdNewBuffer) {
                *createdNewBuffer = true;
            }
        } else {
            SkASSERT(fMapPtr);
            this->addMemoryBarrier(gpu,
                                   buffer_type_to_access_flags(fDesc.fType),
                                   VK_ACCESS_TRANSFER_WRITE_BIT,
                                   VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   false);
        }
    }

    if (fDesc.fDynamic) {
        const GrVkAlloc& alloc = this->alloc();
        SkASSERT(alloc.fSize > 0);
        SkASSERT(alloc.fSize >= size);
        SkASSERT(0 == fOffset);

        fMapPtr = GrVkMemory::MapAlloc(gpu, alloc);
    } else {
        if (!fMapPtr) {
            fMapPtr = new unsigned char[this->size()];
        }
    }

    VALIDATE();
}

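// Finishes a CPU write. Dynamic buffers flush and unmap the mapped allocation. Static buffers
// upload the scratch data with vkCmdUpdateBuffer when the size is small and 4-byte aligned,
// otherwise through a staging GrVkTransferBuffer and a buffer-to-buffer copy, and then record a
// barrier from TRANSFER_WRITE back to the buffer's read access.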
void GrVkBuffer::internalUnmap(GrVkGpu* gpu, size_t size) {
    VALIDATE();
    SkASSERT(this->vkIsMapped());

    if (fDesc.fDynamic) {
        const GrVkAlloc& alloc = this->alloc();
        SkASSERT(alloc.fSize > 0);
        SkASSERT(alloc.fSize >= size);
        // We currently don't use fOffset
        SkASSERT(0 == fOffset);

        GrVkMemory::FlushMappedAlloc(gpu, alloc, 0, size);
        GrVkMemory::UnmapAlloc(gpu, alloc);
        fMapPtr = nullptr;
    } else {
        // vkCmdUpdateBuffer requires size <= 64k and 4-byte alignment.
        // https://bugs.chromium.org/p/skia/issues/detail?id=7488
        if (size <= 65536 && 0 == (size & 0x3)) {
            gpu->updateBuffer(this, fMapPtr, this->offset(), size);
        } else {
            sk_sp<GrVkTransferBuffer> transferBuffer =
                    GrVkTransferBuffer::Make(gpu, size, GrVkBuffer::kCopyRead_Type);
            if (!transferBuffer) {
                return;
            }

            char* buffer = (char*)transferBuffer->map();
            memcpy(buffer, fMapPtr, size);
            transferBuffer->unmap();

            gpu->copyBuffer(transferBuffer.get(), this, 0, this->offset(), size);
        }
        this->addMemoryBarrier(gpu,
                               VK_ACCESS_TRANSFER_WRITE_BIT,
                               buffer_type_to_access_flags(fDesc.fType),
                               VK_PIPELINE_STAGE_TRANSFER_BIT,
                               VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                               false);
    }
}

bool GrVkBuffer::vkIsMapped() const {
    VALIDATE();
    return SkToBool(fMapPtr);
}

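// Copies srcSizeInBytes bytes from src into the buffer via internalMap()/internalUnmap(). Fails
// if the source is larger than the buffer or the map did not produce a writable pointer.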
bool GrVkBuffer::vkUpdateData(GrVkGpu* gpu, const void* src, size_t srcSizeInBytes,
                              bool* createdNewBuffer) {
    if (srcSizeInBytes > fDesc.fSizeInBytes) {
        return false;
    }

    this->internalMap(gpu, srcSizeInBytes, createdNewBuffer);
    if (!fMapPtr) {
        return false;
    }

    memcpy(fMapPtr, src, srcSizeInBytes);

    this->internalUnmap(gpu, srcSizeInBytes);

    return true;
}

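// Debug-only sanity check that the buffer's type is one of the known kinds.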
void GrVkBuffer::validate() const {
    SkASSERT(!fResource || kVertex_Type == fDesc.fType || kIndex_Type == fDesc.fType
             || kTexel_Type == fDesc.fType || kCopyRead_Type == fDesc.fType
             || kCopyWrite_Type == fDesc.fType || kUniform_Type == fDesc.fType);
}