/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkBuffer.h"
#include "GrVkGpu.h"
#include "GrVkMemory.h"
#include "GrVkUtil.h"

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

#ifdef SK_DEBUG
#define VALIDATE() this->validate()
#else
#define VALIDATE() do {} while(false)
#endif

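// Creates the VkBuffer with usage flags chosen from desc.fType, allocates and binds device
// memory for it, and wraps both in a ref-counted Resource. Returns nullptr if buffer creation
// or memory allocation fails.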
const GrVkBuffer::Resource* GrVkBuffer::Create(const GrVkGpu* gpu, const Desc& desc) {
    VkBuffer buffer;
    GrVkAlloc alloc;

    // create the buffer object
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = 0;
    bufInfo.size = desc.fSizeInBytes;
    switch (desc.fType) {
        case kVertex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
            break;
        case kIndex_Type:
            bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
            break;
        case kUniform_Type:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            break;
        case kCopyRead_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            break;
        case kCopyWrite_Type:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            break;
    }
    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;

    VkResult err;
    err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
    if (err) {
        return nullptr;
    }

    if (!GrVkMemory::AllocAndBindBufferMemory(gpu,
                                              buffer,
                                              desc.fType,
                                              &alloc)) {
        // the VkBuffer was created above, so release it before giving up
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        return nullptr;
    }

    const GrVkBuffer::Resource* resource = new GrVkBuffer::Resource(buffer, alloc, desc.fType);
    if (!resource) {
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        GrVkMemory::FreeBufferMemory(gpu, desc.fType, alloc);
        return nullptr;
    }

    return resource;
}

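// Adds a buffer memory barrier covering the entire buffer (see the TODO below), making writes
// performed under srcAccessMask/srcStageMask available to accesses under
// dstAccessMask/dstStageMask.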
void GrVkBuffer::addMemoryBarrier(const GrVkGpu* gpu,
                                  VkAccessFlags srcAccessMask,
                                  VkAccessFlags dstAccessMask,
                                  VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
                                  bool byRegion) const {
    VkBufferMemoryBarrier bufferMemoryBarrier = {
        VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // sType
        NULL,                                    // pNext
        srcAccessMask,                           // srcAccessMask
        dstAccessMask,                           // dstAccessMask
        VK_QUEUE_FAMILY_IGNORED,                 // srcQueueFamilyIndex
        VK_QUEUE_FAMILY_IGNORED,                 // dstQueueFamilyIndex
        this->buffer(),                          // buffer
        0,                                       // offset
        fDesc.fSizeInBytes,                      // size
    };

    // TODO: restrict to area of buffer we're interested in
    gpu->addBufferMemoryBarrier(srcStageMask, dstStageMask, byRegion, &bufferMemoryBarrier);
}

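// Releases the Vulkan objects owned by the Resource: destroys the VkBuffer and frees its
// backing memory allocation.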
void GrVkBuffer::Resource::freeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory);
    VK_CALL(gpu, DestroyBuffer(gpu->device(), fBuffer, nullptr));
    GrVkMemory::FreeBufferMemory(gpu, fType, fAlloc);
}

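// Drops this GrVkBuffer's reference to the underlying Resource; the Vulkan objects are freed
// once all outstanding references (e.g. from command buffers) are released.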
void GrVkBuffer::vkRelease(const GrVkGpu* gpu) {
    VALIDATE();
    fResource->unref(gpu);
    fResource = nullptr;
    fMapPtr = nullptr;
    VALIDATE();
}

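// Abandons the Resource without destroying the underlying Vulkan objects, for use when the
// GrVkGpu itself is being abandoned.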
void GrVkBuffer::vkAbandon() {
    fResource->unrefAndAbandon();
    fResource = nullptr;
    fMapPtr = nullptr;
    VALIDATE();
}

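// Maps the buffer's memory for CPU access. If the current Resource is still referenced by a
// command buffer, a fresh buffer is created first so in-flight work keeps its own copy.
// Returns the mapped pointer, or nullptr if mapping fails.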
void* GrVkBuffer::vkMap(const GrVkGpu* gpu) {
    VALIDATE();
    SkASSERT(!this->vkIsMapped());

    if (!fResource->unique()) {
        // in use by the command buffer, so we need to create a new one
        fResource->unref(gpu);
        fResource = Create(gpu, fDesc);
    }

    const GrVkAlloc& alloc = this->alloc();
    VkResult err = VK_CALL(gpu, MapMemory(gpu->device(), alloc.fMemory, alloc.fOffset,
                                          VK_WHOLE_SIZE, 0, &fMapPtr));
    if (err) {
        fMapPtr = nullptr;
    }

    VALIDATE();
    return fMapPtr;
}

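// Unmaps memory previously mapped by vkMap.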
void GrVkBuffer::vkUnmap(const GrVkGpu* gpu) {
    VALIDATE();
    SkASSERT(this->vkIsMapped());

    VK_CALL(gpu, UnmapMemory(gpu->device(), this->alloc().fMemory));

    fMapPtr = nullptr;
}

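// Returns true if the buffer is currently mapped.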
bool GrVkBuffer::vkIsMapped() const {
    VALIDATE();
    return SkToBool(fMapPtr);
}

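// Copies srcSizeInBytes bytes from src into the buffer through a temporary mapping. If the
// current Resource is still in use by a command buffer, a new one is created first and
// *createdNewBuffer (if non-null) is set to true. Returns false if the source is larger than
// the buffer or the mapping fails.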
bool GrVkBuffer::vkUpdateData(const GrVkGpu* gpu, const void* src, size_t srcSizeInBytes,
                              bool* createdNewBuffer) {
    SkASSERT(!this->vkIsMapped());
    VALIDATE();
    if (srcSizeInBytes > fDesc.fSizeInBytes) {
        return false;
    }

    if (!fResource->unique()) {
        // in use by the command buffer, so we need to create a new one
        fResource->unref(gpu);
        fResource = Create(gpu, fDesc);
        if (createdNewBuffer) {
            *createdNewBuffer = true;
        }
    }

    void* mapPtr;
    const GrVkAlloc& alloc = this->alloc();
    VkResult err = VK_CALL(gpu, MapMemory(gpu->device(), alloc.fMemory, alloc.fOffset,
                                          srcSizeInBytes, 0, &mapPtr));

    if (VK_SUCCESS != err) {
        return false;
    }

    memcpy(mapPtr, src, srcSizeInBytes);

    VK_CALL(gpu, UnmapMemory(gpu->device(), alloc.fMemory));

    return true;
}

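// Sanity check used by the debug-only VALIDATE() macro: the buffer type must be one of the
// supported kinds.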
void GrVkBuffer::validate() const {
    SkASSERT(!fResource || kVertex_Type == fDesc.fType || kIndex_Type == fDesc.fType
             || kCopyRead_Type == fDesc.fType || kCopyWrite_Type == fDesc.fType
             || kUniform_Type == fDesc.fType);
}