/*
 * Copyright 2021 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkBuffer.h"

#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkMemory.h"
#include "src/gpu/vk/GrVkUtil.h"

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

GrVkBuffer::GrVkBuffer(GrVkGpu* gpu,
                       size_t sizeInBytes,
                       GrGpuBufferType bufferType,
                       GrAccessPattern accessPattern,
                       VkBuffer buffer,
                       const GrVkAlloc& alloc,
                       const GrVkDescriptorSet* uniformDescriptorSet)
        : GrGpuBuffer(gpu, sizeInBytes, bufferType, accessPattern)
        , fBuffer(buffer)
        , fAlloc(alloc)
        , fUniformDescriptorSet(uniformDescriptorSet) {
    // We always require dynamic buffers to be mappable.
    SkASSERT(accessPattern != kDynamic_GrAccessPattern || this->isVkMappable());
    SkASSERT(bufferType != GrGpuBufferType::kUniform || uniformDescriptorSet);
    this->registerWithCache(SkBudgeted::kYes);
}

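// Allocates a uniform descriptor set from the gpu's resource provider and writes the entire
// buffer into its single uniform-buffer binding. Returns nullptr if no descriptor set is
// available.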
static const GrVkDescriptorSet* make_uniform_desc_set(GrVkGpu* gpu, VkBuffer buffer, size_t size) {
    const GrVkDescriptorSet* descriptorSet = gpu->resourceProvider().getUniformDescriptorSet();
    if (!descriptorSet) {
        return nullptr;
    }

    VkDescriptorBufferInfo bufferInfo;
    memset(&bufferInfo, 0, sizeof(VkDescriptorBufferInfo));
    bufferInfo.buffer = buffer;
    bufferInfo.offset = 0;
    bufferInfo.range = size;

    VkWriteDescriptorSet descriptorWrite;
    memset(&descriptorWrite, 0, sizeof(VkWriteDescriptorSet));
    descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptorWrite.pNext = nullptr;
    descriptorWrite.dstSet = *descriptorSet->descriptorSet();
    descriptorWrite.dstBinding = GrVkUniformHandler::kUniformBinding;
    descriptorWrite.dstArrayElement = 0;
    descriptorWrite.descriptorCount = 1;
    descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptorWrite.pImageInfo = nullptr;
    descriptorWrite.pBufferInfo = &bufferInfo;
    descriptorWrite.pTexelBufferView = nullptr;

    GR_VK_CALL(gpu->vkInterface(),
               UpdateDescriptorSets(gpu->device(), 1, &descriptorWrite, 0, nullptr));
    return descriptorSet;
}

sk_sp<GrVkBuffer> GrVkBuffer::Make(GrVkGpu* gpu,
                                   size_t size,
                                   GrGpuBufferType bufferType,
                                   GrAccessPattern accessPattern) {
    VkBuffer buffer;
    GrVkAlloc alloc;

    // The only time we don't require mappable buffers is when we have a static access pattern and
    // we're on a device where GPU-only memory has faster reads on the GPU than memory that is also
    // mappable on the CPU. Protected memory always uses mappable buffers.
    bool requiresMappable = gpu->protectedContext() ||
                            accessPattern == kDynamic_GrAccessPattern ||
                            accessPattern == kStream_GrAccessPattern ||
                            !gpu->vkCaps().gpuOnlyBuffersMorePerformant();

    using BufferUsage = GrVkMemoryAllocator::BufferUsage;
    BufferUsage allocUsage;

    // Create the buffer object.
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = 0;
    bufInfo.size = size;
    switch (bufferType) {
        case GrGpuBufferType::kVertex:
            bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
            allocUsage = requiresMappable ? BufferUsage::kCpuWritesGpuReads : BufferUsage::kGpuOnly;
            break;
        case GrGpuBufferType::kIndex:
            bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
            allocUsage = requiresMappable ? BufferUsage::kCpuWritesGpuReads : BufferUsage::kGpuOnly;
            break;
        case GrGpuBufferType::kDrawIndirect:
            bufInfo.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
            allocUsage = requiresMappable ? BufferUsage::kCpuWritesGpuReads : BufferUsage::kGpuOnly;
            break;
        case GrGpuBufferType::kUniform:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            allocUsage = BufferUsage::kCpuWritesGpuReads;
            break;
        case GrGpuBufferType::kXferCpuToGpu:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocUsage = BufferUsage::kTransfersFromCpuToGpu;
            break;
        case GrGpuBufferType::kXferGpuToCpu:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            allocUsage = BufferUsage::kTransfersFromGpuToCpu;
            break;
    }
    // We may not always get a mappable buffer for non-dynamic access buffers. Thus we set the
    // transfer dst usage bit in case we need to do a copy to write data.
    // TODO: It doesn't really hurt to set this extra usage flag, but maybe we can narrow the set
    // of buffers we apply it to instead of using it for every non-dynamic buffer.
    if (!requiresMappable) {
        bufInfo.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    }

    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;

    VkResult err;
    err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
    if (err) {
        return nullptr;
    }

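    // Allocate device memory for the buffer and bind it; if that fails, destroy the VkBuffer
    // before bailing out.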
    if (!GrVkMemory::AllocAndBindBufferMemory(gpu, buffer, allocUsage, &alloc)) {
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        return nullptr;
    }

    // If this is a uniform buffer we must set up a descriptor set.
    const GrVkDescriptorSet* uniformDescSet = nullptr;
    if (bufferType == GrGpuBufferType::kUniform) {
        uniformDescSet = make_uniform_desc_set(gpu, buffer, size);
        if (!uniformDescSet) {
            VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
            GrVkMemory::FreeBufferMemory(gpu, alloc);
            return nullptr;
        }
    }

    return sk_sp<GrVkBuffer>(new GrVkBuffer(gpu, size, bufferType, accessPattern, buffer, alloc,
                                            uniformDescSet));
}

void GrVkBuffer::vkMap(size_t size) {
    SkASSERT(!fMapPtr);
    if (this->isVkMappable()) {
        // Not every buffer will use command buffer usage refs and instead the command buffer just
        // holds normal refs. Systems higher up in Ganesh should be making sure not to reuse a
        // buffer that currently has a ref held by something else. However, we do need to make sure
        // there isn't a buffer with just a command buffer usage that is trying to be mapped.
        SkASSERT(this->internalHasNoCommandBufferUsages());
        SkASSERT(fAlloc.fSize > 0);
        SkASSERT(fAlloc.fSize >= size);
        fMapPtr = GrVkMemory::MapAlloc(this->getVkGpu(), fAlloc);
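        // For buffers used to read data back from the GPU, invalidate the mapped range so that
        // GPU writes are visible to the CPU before the pointer is handed out.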
        if (fMapPtr && this->intendedType() == GrGpuBufferType::kXferGpuToCpu) {
            GrVkMemory::InvalidateMappedAlloc(this->getVkGpu(), fAlloc, 0, size);
        }
    }
}

void GrVkBuffer::vkUnmap(size_t size) {
    SkASSERT(fMapPtr && this->isVkMappable());

    SkASSERT(fAlloc.fSize > 0);
    SkASSERT(fAlloc.fSize >= size);

    GrVkGpu* gpu = this->getVkGpu();
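    // Flush the mapped range so that CPU writes are visible to the GPU before unmapping.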
    GrVkMemory::FlushMappedAlloc(gpu, fAlloc, 0, size);
    GrVkMemory::UnmapAlloc(gpu, fAlloc);
}

static VkAccessFlags buffer_type_to_access_flags(GrGpuBufferType type) {
    switch (type) {
        case GrGpuBufferType::kIndex:
            return VK_ACCESS_INDEX_READ_BIT;
        case GrGpuBufferType::kVertex:
            return VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
        default:
            // This helper is only called for static buffers so we should only ever see index or
            // vertex buffer types.
            SkUNREACHABLE;
    }
}

void GrVkBuffer::copyCpuDataToGpuBuffer(const void* src, size_t size) {
    SkASSERT(src);

    GrVkGpu* gpu = this->getVkGpu();

    // We should never call this method in protected contexts.
    SkASSERT(!gpu->protectedContext());

    // The Vulkan API restricts the use of vkCmdUpdateBuffer to updates that are at most
    // 65536 bytes and whose size is 4-byte aligned.
    if ((size <= 65536) && (0 == (size & 0x3)) && !gpu->vkCaps().avoidUpdateBuffers()) {
        gpu->updateBuffer(sk_ref_sp(this), src, /*offset=*/0, size);
    } else {
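        // Otherwise stage the data in a CPU-to-GPU transfer buffer and copy it into this buffer
        // on the GPU.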
        GrResourceProvider* resourceProvider = gpu->getContext()->priv().resourceProvider();
        sk_sp<GrGpuBuffer> transferBuffer = resourceProvider->createBuffer(
                size, GrGpuBufferType::kXferCpuToGpu, kDynamic_GrAccessPattern, src);
        if (!transferBuffer) {
            return;
        }

        gpu->copyBuffer(std::move(transferBuffer), sk_ref_sp(this), /*srcOffset=*/0,
                        /*dstOffset=*/0, size);
    }

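    // Make the transfer write visible to subsequent index/vertex reads from this buffer.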
    this->addMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT,
                           buffer_type_to_access_flags(this->intendedType()),
                           VK_PIPELINE_STAGE_TRANSFER_BIT,
                           VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                           /*byRegion=*/false);
}

void GrVkBuffer::addMemoryBarrier(VkAccessFlags srcAccessMask,
                                  VkAccessFlags dstAccessMask,
                                  VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
                                  bool byRegion) const {
    VkBufferMemoryBarrier bufferMemoryBarrier = {
            VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,  // sType
            nullptr,                                  // pNext
            srcAccessMask,                            // srcAccessMask
            dstAccessMask,                            // dstAccessMask
            VK_QUEUE_FAMILY_IGNORED,                  // srcQueueFamilyIndex
            VK_QUEUE_FAMILY_IGNORED,                  // dstQueueFamilyIndex
            fBuffer,                                  // buffer
            0,                                        // offset
            this->size(),                             // size
    };

    // TODO: restrict the barrier to the area of the buffer we're interested in
    this->getVkGpu()->addBufferMemoryBarrier(srcStageMask, dstStageMask, byRegion,
                                             &bufferMemoryBarrier);
}

void GrVkBuffer::vkRelease() {
    if (this->wasDestroyed()) {
        return;
    }

    if (fMapPtr) {
        this->vkUnmap(this->size());
        fMapPtr = nullptr;
    }

    if (fUniformDescriptorSet) {
        fUniformDescriptorSet->recycle();
        fUniformDescriptorSet = nullptr;
    }

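    // Destroy the VkBuffer handle and free its backing memory allocation.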
    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory && fAlloc.fBackendMemory);
    VK_CALL(this->getVkGpu(), DestroyBuffer(this->getVkGpu()->device(), fBuffer, nullptr));
    fBuffer = VK_NULL_HANDLE;

    GrVkMemory::FreeBufferMemory(this->getVkGpu(), fAlloc);
    fAlloc.fMemory = VK_NULL_HANDLE;
    fAlloc.fBackendMemory = 0;
}

void GrVkBuffer::onRelease() {
    this->vkRelease();
    this->GrGpuBuffer::onRelease();
}

void GrVkBuffer::onAbandon() {
    this->vkRelease();
    this->GrGpuBuffer::onAbandon();
}

void GrVkBuffer::onMap() {
    if (!this->wasDestroyed()) {
        this->vkMap(this->size());
    }
}

void GrVkBuffer::onUnmap() {
    if (!this->wasDestroyed()) {
        this->vkUnmap(this->size());
    }
}

bool GrVkBuffer::onUpdateData(const void* src, size_t srcSizeInBytes) {
    if (this->wasDestroyed()) {
        return false;
    }

    if (srcSizeInBytes > this->size()) {
        return false;
    }

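    // If the buffer's memory is host-mappable, write the data directly through a mapped pointer;
    // otherwise stage it and copy on the GPU via copyCpuDataToGpuBuffer.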
    if (this->isVkMappable()) {
        this->vkMap(srcSizeInBytes);
        if (!fMapPtr) {
            return false;
        }
        memcpy(fMapPtr, src, srcSizeInBytes);
        this->vkUnmap(srcSizeInBytes);
        fMapPtr = nullptr;
    } else {
        this->copyCpuDataToGpuBuffer(src, srcSizeInBytes);
    }
    return true;
}

GrVkGpu* GrVkBuffer::getVkGpu() const {
    SkASSERT(!this->wasDestroyed());
    return static_cast<GrVkGpu*>(this->getGpu());
}

const VkDescriptorSet* GrVkBuffer::uniformDescriptorSet() const {
    SkASSERT(fUniformDescriptorSet);
    return fUniformDescriptorSet->descriptorSet();
}