/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkBuffer.h"
#include "GrVkGpu.h"
#include "GrVkMemory.h"
#include "GrVkUtil.h"

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

#ifdef SK_DEBUG
#define VALIDATE() this->validate()
#else
#define VALIDATE() do {} while(false)
#endif

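// Creates the backing VkBuffer for a GrVkBuffer: picks usage bits from the buffer type
// (non-dynamic buffers also get TRANSFER_DST so they can be updated with transfer commands),
// allocates and binds device memory, and wraps both in a ref-counted Resource.
// Returns nullptr if buffer creation or memory allocation fails.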
const GrVkBuffer::Resource* GrVkBuffer::Create(const GrVkGpu* gpu, const Desc& desc) {
    VkBuffer buffer;
    GrVkAlloc alloc;

    // create the buffer object
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = 0;
    bufInfo.size = desc.fSizeInBytes;
    switch (desc.fType) {
        case kVertex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
            break;
        case kIndex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
            break;
        case kUniform_Type:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            break;
        case kCopyRead_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            break;
        case kCopyWrite_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            break;
    }
    if (!desc.fDynamic) {
        bufInfo.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    }

    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;

    VkResult err;
    err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
    if (err) {
        return nullptr;
    }

    if (!GrVkMemory::AllocAndBindBufferMemory(gpu,
                                              buffer,
                                              desc.fType,
                                              desc.fDynamic,
                                              &alloc)) {
        // Don't leak the VkBuffer if we couldn't get memory for it.
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        return nullptr;
    }

    const GrVkBuffer::Resource* resource = new GrVkBuffer::Resource(buffer, alloc, desc.fType);
    if (!resource) {
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        GrVkMemory::FreeBufferMemory(gpu, desc.fType, alloc);
        return nullptr;
    }

    return resource;
}

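// Adds a VkBufferMemoryBarrier covering the entire buffer via the GrVkGpu, ordering
// srcAccessMask accesses at srcStageMask before dstAccessMask accesses at dstStageMask.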
void GrVkBuffer::addMemoryBarrier(const GrVkGpu* gpu,
                                  VkAccessFlags srcAccessMask,
                                  VkAccessFlags dstAccessMask,
                                  VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
                                  bool byRegion) const {
    VkBufferMemoryBarrier bufferMemoryBarrier = {
        VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // sType
        NULL,                                    // pNext
        srcAccessMask,                           // srcAccessMask
        dstAccessMask,                           // dstAccessMask
        VK_QUEUE_FAMILY_IGNORED,                 // srcQueueFamilyIndex
        VK_QUEUE_FAMILY_IGNORED,                 // dstQueueFamilyIndex
        this->buffer(),                          // buffer
        0,                                       // offset
        fDesc.fSizeInBytes,                      // size
    };

    // TODO: restrict to area of buffer we're interested in
    gpu->addBufferMemoryBarrier(srcStageMask, dstStageMask, byRegion, &bufferMemoryBarrier);
}

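// Called when the last reference to the Resource is removed (and it has not been abandoned):
// destroys the VkBuffer and frees its memory through GrVkMemory.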
void GrVkBuffer::Resource::freeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory);
    VK_CALL(gpu, DestroyBuffer(gpu->device(), fBuffer, nullptr));
    GrVkMemory::FreeBufferMemory(gpu, fType, fAlloc);
}

void GrVkBuffer::vkRelease(const GrVkGpu* gpu) {
    VALIDATE();
    fResource->recycle(const_cast<GrVkGpu*>(gpu));
    fResource = nullptr;
    if (!fDesc.fDynamic) {
        // Non-dynamic buffers own a CPU-side staging array in fMapPtr (see internalMap).
        delete[] (unsigned char*)fMapPtr;
    }
    fMapPtr = nullptr;
    VALIDATE();
}

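// Abandon drops our reference without destroying the underlying Vulkan objects; it is used when
// the context/device has been abandoned and GPU resources can no longer be freed safely.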
void GrVkBuffer::vkAbandon() {
    fResource->unrefAndAbandon();
    fResource = nullptr;
    if (!fDesc.fDynamic) {
        // Free the CPU-side staging array; only the GPU objects are abandoned.
        delete[] (unsigned char*)fMapPtr;
    }
    fMapPtr = nullptr;
    VALIDATE();
}

VkAccessFlags buffer_type_to_access_flags(GrVkBuffer::Type type) {
    switch (type) {
        case GrVkBuffer::kIndex_Type:
            return VK_ACCESS_INDEX_READ_BIT;
        case GrVkBuffer::kVertex_Type:
            return VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
        default:
            // This helper is only called for static buffers, so we should only ever see index
            // or vertex buffer types.
            SkASSERT(false);
            return 0;
    }
}

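// Maps the buffer for CPU writes. Dynamic buffers map their host-visible memory directly with
// vkMapMemory; if the current Resource is still referenced by an in-flight command buffer, it is
// recycled and replaced first (reported through *createdNewBuffer). Non-dynamic buffers get a
// CPU-side staging array (reused across updates); if the buffer is still in use, a barrier is
// added so pending vertex/index reads complete before the upcoming transfer write.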
void GrVkBuffer::internalMap(GrVkGpu* gpu, size_t size, bool* createdNewBuffer) {
    VALIDATE();
    SkASSERT(!this->vkIsMapped());

    if (!fResource->unique()) {
        if (fDesc.fDynamic) {
            // in use by the command buffer, so we need to create a new one
            fResource->recycle(gpu);
            fResource = this->createResource(gpu, fDesc);
            if (createdNewBuffer) {
                *createdNewBuffer = true;
            }
        } else {
            SkASSERT(fMapPtr);
            this->addMemoryBarrier(gpu,
                                   buffer_type_to_access_flags(fDesc.fType),
                                   VK_ACCESS_TRANSFER_WRITE_BIT,
                                   VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   false);
        }
    }

    if (fDesc.fDynamic) {
        const GrVkAlloc& alloc = this->alloc();
        VkResult err = VK_CALL(gpu, MapMemory(gpu->device(), alloc.fMemory,
                                              alloc.fOffset + fOffset,
                                              size, 0, &fMapPtr));
        if (err) {
            fMapPtr = nullptr;
        }
    } else {
        if (!fMapPtr) {
            fMapPtr = new unsigned char[this->size()];
        }
    }

    VALIDATE();
}

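// Unmaps the buffer and pushes the written data to the GPU. For dynamic buffers this flushes the
// mapped allocation and unmaps it. For non-dynamic buffers the staging array is uploaded with a
// transfer (gpu->updateBuffer) followed by a barrier so the transfer write is visible to later
// vertex/index reads; the staging array is kept in fMapPtr for reuse.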
void GrVkBuffer::internalUnmap(GrVkGpu* gpu, size_t size) {
    VALIDATE();
    SkASSERT(this->vkIsMapped());

    if (fDesc.fDynamic) {
        GrVkMemory::FlushMappedAlloc(gpu, this->alloc());
        VK_CALL(gpu, UnmapMemory(gpu->device(), this->alloc().fMemory));
        fMapPtr = nullptr;
    } else {
        gpu->updateBuffer(this, fMapPtr, this->offset(), size);
        this->addMemoryBarrier(gpu,
                               VK_ACCESS_TRANSFER_WRITE_BIT,
                               buffer_type_to_access_flags(fDesc.fType),
                               VK_PIPELINE_STAGE_TRANSFER_BIT,
                               VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                               false);
    }
}

bool GrVkBuffer::vkIsMapped() const {
    VALIDATE();
    return SkToBool(fMapPtr);
}

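// Copies srcSizeInBytes bytes from src into the start of the buffer by mapping, memcpy'ing, and
// unmapping. Returns false if the source is larger than the buffer or if the map failed.
// *createdNewBuffer is set to true if the update had to allocate a replacement VkBuffer.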
bool GrVkBuffer::vkUpdateData(GrVkGpu* gpu, const void* src, size_t srcSizeInBytes,
                              bool* createdNewBuffer) {
    if (srcSizeInBytes > fDesc.fSizeInBytes) {
        return false;
    }

    this->internalMap(gpu, srcSizeInBytes, createdNewBuffer);
    if (!fMapPtr) {
        return false;
    }

    memcpy(fMapPtr, src, srcSizeInBytes);

    this->internalUnmap(gpu, srcSizeInBytes);

    return true;
}

void GrVkBuffer::validate() const {
    SkASSERT(!fResource || kVertex_Type == fDesc.fType || kIndex_Type == fDesc.fType
             || kCopyRead_Type == fDesc.fType || kCopyWrite_Type == fDesc.fType
             || kUniform_Type == fDesc.fType);
}