blob: 82674b4cfbfe04f6d6f29ee8988d9aacfd7a7c08 [file] [log] [blame]
/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrVkBuffer.h"
9#include "GrVkGpu.h"
10#include "GrVkMemory.h"
11#include "GrVkUtil.h"
12
13#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)
14
15#ifdef SK_DEBUG
16#define VALIDATE() this->validate()
17#else
18#define VALIDATE() do {} while(false)
19#endif
20
21const GrVkBuffer::Resource* GrVkBuffer::Create(const GrVkGpu* gpu, const Desc& desc) {
22 VkBuffer buffer;
jvanverth1e305ba2016-06-01 09:39:15 -070023 GrVkAlloc alloc;
Greg Daniel164a9f02016-02-22 09:56:40 -050024
25 // create the buffer object
26 VkBufferCreateInfo bufInfo;
27 memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
28 bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
29 bufInfo.flags = 0;
30 bufInfo.size = desc.fSizeInBytes;
31 switch (desc.fType) {
jvanvertha584de92016-06-30 09:10:52 -070032 case kVertex_Type:
33 bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
34 break;
35 case kIndex_Type:
36 bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
37 break;
38 case kUniform_Type:
39 bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
40 break;
41 case kCopyRead_Type:
42 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
43 break;
44 case kCopyWrite_Type:
45 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
46 break;
Greg Daniel164a9f02016-02-22 09:56:40 -050047 }
jvanvertha584de92016-06-30 09:10:52 -070048 if (!desc.fDynamic) {
49 bufInfo.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
50 }
51
Greg Daniel164a9f02016-02-22 09:56:40 -050052 bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
53 bufInfo.queueFamilyIndexCount = 0;
54 bufInfo.pQueueFamilyIndices = nullptr;
55
56 VkResult err;
57 err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
58 if (err) {
59 return nullptr;
60 }
61
Greg Daniel164a9f02016-02-22 09:56:40 -050062 if (!GrVkMemory::AllocAndBindBufferMemory(gpu,
63 buffer,
jvanverth6b6ffc42016-06-13 14:28:07 -070064 desc.fType,
jvanvertha584de92016-06-30 09:10:52 -070065 desc.fDynamic,
Greg Daniel164a9f02016-02-22 09:56:40 -050066 &alloc)) {
jvanverth6b6ffc42016-06-13 14:28:07 -070067 return nullptr;
Greg Daniel164a9f02016-02-22 09:56:40 -050068 }
69
jvanverth6b6ffc42016-06-13 14:28:07 -070070 const GrVkBuffer::Resource* resource = new GrVkBuffer::Resource(buffer, alloc, desc.fType);
Greg Daniel164a9f02016-02-22 09:56:40 -050071 if (!resource) {
72 VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
jvanverth6b6ffc42016-06-13 14:28:07 -070073 GrVkMemory::FreeBufferMemory(gpu, desc.fType, alloc);
Greg Daniel164a9f02016-02-22 09:56:40 -050074 return nullptr;
75 }
76
77 return resource;
78}
79
Greg Daniel164a9f02016-02-22 09:56:40 -050080void GrVkBuffer::addMemoryBarrier(const GrVkGpu* gpu,
81 VkAccessFlags srcAccessMask,
82 VkAccessFlags dstAccesMask,
83 VkPipelineStageFlags srcStageMask,
84 VkPipelineStageFlags dstStageMask,
85 bool byRegion) const {
86 VkBufferMemoryBarrier bufferMemoryBarrier = {
87 VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // sType
88 NULL, // pNext
89 srcAccessMask, // srcAccessMask
90 dstAccesMask, // dstAccessMask
91 VK_QUEUE_FAMILY_IGNORED, // srcQueueFamilyIndex
92 VK_QUEUE_FAMILY_IGNORED, // dstQueueFamilyIndex
93 this->buffer(), // buffer
94 0, // offset
95 fDesc.fSizeInBytes, // size
96 };
97
98 // TODO: restrict to area of buffer we're interested in
99 gpu->addBufferMemoryBarrier(srcStageMask, dstStageMask, byRegion, &bufferMemoryBarrier);
100}
101
// Releases the Vulkan objects owned by this Resource: destroys the VkBuffer
// and returns its memory allocation to GrVkMemory. Requires a live GrVkGpu.
void GrVkBuffer::Resource::freeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory);
    VK_CALL(gpu, DestroyBuffer(gpu->device(), fBuffer, nullptr));
    GrVkMemory::FreeBufferMemory(gpu, fType, fAlloc);
}
108
109void GrVkBuffer::vkRelease(const GrVkGpu* gpu) {
110 VALIDATE();
jvanverth4c6e47a2016-07-22 10:34:52 -0700111 fResource->recycle(const_cast<GrVkGpu*>(gpu));
Greg Daniel164a9f02016-02-22 09:56:40 -0500112 fResource = nullptr;
113 fMapPtr = nullptr;
114 VALIDATE();
115}
116
117void GrVkBuffer::vkAbandon() {
118 fResource->unrefAndAbandon();
jvanverthaf236b52016-05-20 06:01:06 -0700119 fResource = nullptr;
Greg Daniel164a9f02016-02-22 09:56:40 -0500120 fMapPtr = nullptr;
121 VALIDATE();
122}
123
egdaniel927ac9c2016-09-19 09:32:09 -0700124VkAccessFlags buffer_type_to_access_flags(GrVkBuffer::Type type) {
125 switch (type) {
126 case GrVkBuffer::kIndex_Type:
127 return VK_ACCESS_INDEX_READ_BIT;
128 case GrVkBuffer::kVertex_Type:
129 return VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
130 default:
131 // This helper is only called for static buffers so we should only ever see index or
132 // vertex buffers types
133 SkASSERT(false);
134 return 0;
135 }
136}
137
// Obtains a CPU-writable pointer (fMapPtr) for updating `size` bytes of this
// buffer. Dynamic buffers map the Vulkan allocation directly; static buffers
// stage writes in a heap scratch buffer that internalUnmap() uploads via a
// transfer. On mapping failure fMapPtr is left null — callers must check it.
void GrVkBuffer::internalMap(GrVkGpu* gpu, size_t size, bool* createdNewBuffer) {
    VALIDATE();
    // NOTE(review): static buffers keep fMapPtr set across unmaps (see the
    // else-branch below), which makes this assert look reachable on a
    // second map of a static buffer — verify intended map/unmap protocol.
    SkASSERT(!this->vkIsMapped());

    if (!fResource->unique()) {
        // The GPU (a pending command buffer) still holds a ref to the resource.
        if (fDesc.fDynamic) {
            // in use by the command buffer, so we need to create a new one
            fResource->recycle(gpu);
            fResource = this->createResource(gpu, fDesc);
            if (createdNewBuffer) {
                // Tell the caller any cached bindings to the old buffer are stale.
                *createdNewBuffer = true;
            }
        } else {
            // Static buffer: reuse the existing scratch pointer and insert a
            // barrier so in-flight vertex/index reads complete before the
            // upcoming transfer write (write-after-read hazard).
            SkASSERT(fMapPtr);
            this->addMemoryBarrier(gpu,
                                   buffer_type_to_access_flags(fDesc.fType),
                                   VK_ACCESS_TRANSFER_WRITE_BIT,
                                   VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   false);
        }
    }

    if (fDesc.fDynamic) {
        // Map the device memory at this buffer's suballocation offset.
        const GrVkAlloc& alloc = this->alloc();
        VkResult err = VK_CALL(gpu, MapMemory(gpu->device(), alloc.fMemory,
                                              alloc.fOffset + fOffset,
                                              size, 0, &fMapPtr));
        if (err) {
            fMapPtr = nullptr;  // signal failure to callers
        }
    } else {
        // Static buffer: allocate the CPU-side staging scratch on first use;
        // it is reused across map/unmap cycles.
        if (!fMapPtr) {
            fMapPtr = new unsigned char[this->size()];
        }
    }

    VALIDATE();
}
177
// Ends a map begun by internalMap(), committing `size` bytes of writes.
// Dynamic buffers are flushed and unmapped; static buffers upload the staged
// bytes with a transfer and re-establish the read barrier.
void GrVkBuffer::internalUnmap(GrVkGpu* gpu, size_t size) {
    VALIDATE();
    SkASSERT(this->vkIsMapped());

    if (fDesc.fDynamic) {
        // Flush before unmapping so CPU writes are visible on non-coherent memory.
        GrVkMemory::FlushMappedAlloc(gpu, this->alloc());
        VK_CALL(gpu, UnmapMemory(gpu->device(), this->alloc().fMemory));
        fMapPtr = nullptr;
    } else {
        // Static buffer: copy the staged bytes into the GPU buffer...
        gpu->updateBuffer(this, fMapPtr, this->offset(), size);
        // ...and make the transfer write visible to later vertex/index reads.
        this->addMemoryBarrier(gpu,
                               VK_ACCESS_TRANSFER_WRITE_BIT,
                               buffer_type_to_access_flags(fDesc.fType),
                               VK_PIPELINE_STAGE_TRANSFER_BIT,
                               VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                               false);
        // fMapPtr is intentionally not cleared here: the scratch buffer is
        // reused by the next internalMap() of this static buffer.
    }
}
196
197bool GrVkBuffer::vkIsMapped() const {
198 VALIDATE();
199 return SkToBool(fMapPtr);
200}
201
jvanvertha584de92016-06-30 09:10:52 -0700202bool GrVkBuffer::vkUpdateData(GrVkGpu* gpu, const void* src, size_t srcSizeInBytes,
egdaniel7cbffda2016-04-08 13:27:53 -0700203 bool* createdNewBuffer) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500204 if (srcSizeInBytes > fDesc.fSizeInBytes) {
205 return false;
206 }
207
egdaniel927ac9c2016-09-19 09:32:09 -0700208 this->internalMap(gpu, srcSizeInBytes, createdNewBuffer);
209 if (!fMapPtr) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500210 return false;
211 }
212
egdaniel927ac9c2016-09-19 09:32:09 -0700213 memcpy(fMapPtr, src, srcSizeInBytes);
Greg Daniel164a9f02016-02-22 09:56:40 -0500214
egdaniel927ac9c2016-09-19 09:32:09 -0700215 this->internalUnmap(gpu, srcSizeInBytes);
Greg Daniel164a9f02016-02-22 09:56:40 -0500216
217 return true;
218}
219
220void GrVkBuffer::validate() const {
221 SkASSERT(!fResource || kVertex_Type == fDesc.fType || kIndex_Type == fDesc.fType
222 || kCopyRead_Type == fDesc.fType || kCopyWrite_Type == fDesc.fType
223 || kUniform_Type == fDesc.fType);
224}