/*
* Copyright 2015 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/

#ifndef GrVkMemory_DEFINED
#define GrVkMemory_DEFINED

#include "GrVkBuffer.h"
#include "SkTArray.h"
#include "SkTLList.h"
#include "vk/GrVkDefines.h"
#include "vk/GrVkTypes.h"

#include <memory>  // for the std::unique_ptr used by GrVkHeap::fSubHeaps

class GrVkGpu;

namespace GrVkMemory {
    /**
     * Allocates vulkan device memory and binds it to the gpu's device for the given object.
     * Returns true if allocation succeeded.
     */
    bool AllocAndBindBufferMemory(const GrVkGpu* gpu,
                                  VkBuffer buffer,
                                  GrVkBuffer::Type type,
                                  bool dynamic,
                                  GrVkAlloc* alloc);
    void FreeBufferMemory(const GrVkGpu* gpu, GrVkBuffer::Type type, const GrVkAlloc& alloc);

    bool AllocAndBindImageMemory(const GrVkGpu* gpu,
                                 VkImage image,
                                 bool linearTiling,
                                 GrVkAlloc* alloc);
    void FreeImageMemory(const GrVkGpu* gpu, bool linearTiling, const GrVkAlloc& alloc);

    VkPipelineStageFlags LayoutToPipelineStageFlags(const VkImageLayout layout);

    VkAccessFlags LayoutToSrcAccessMask(const VkImageLayout layout);

    void FlushMappedAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc);
    void InvalidateMappedAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc);
}

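// Illustrative sketch of how a caller might use the GrVkMemory helpers above when
// backing a buffer (not actual Skia call-site code; "gpu" and "buffer" are assumed
// to exist, and kVertex_Type is one of the GrVkBuffer::Type enumerators).
// FlushMappedAlloc would only matter for host-visible, non-coherent memory that
// was written through a mapping.
//
//     GrVkAlloc alloc;
//     if (!GrVkMemory::AllocAndBindBufferMemory(gpu, buffer, GrVkBuffer::kVertex_Type,
//                                               /*dynamic=*/true, &alloc)) {
//         // allocation or binding failed; destroy the VkBuffer and bail out
//     }
//     // ... map and write vertex data, then:
//     GrVkMemory::FlushMappedAlloc(gpu, alloc);
//     // ... when the buffer is destroyed:
//     GrVkMemory::FreeBufferMemory(gpu, GrVkBuffer::kVertex_Type, alloc);
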
class GrVkFreeListAlloc {
public:
    GrVkFreeListAlloc(VkDeviceSize size, VkDeviceSize alignment)
        : fSize(size)
        , fAlignment(alignment)
        , fFreeSize(size)
        , fLargestBlockSize(size)
        , fLargestBlockOffset(0) {
        Block* block = fFreeList.addToTail();
        block->fOffset = 0;
        block->fSize = fSize;
    }
    ~GrVkFreeListAlloc() {
        this->reset();
    }

    VkDeviceSize size() const { return fSize; }
    VkDeviceSize alignment() const { return fAlignment; }
    VkDeviceSize freeSize() const { return fFreeSize; }
    VkDeviceSize largestBlockSize() const { return fLargestBlockSize; }

    bool unallocated() const { return fSize == fFreeSize; }

protected:
    bool alloc(VkDeviceSize requestedSize, VkDeviceSize* allocOffset, VkDeviceSize* allocSize);
    void free(VkDeviceSize allocOffset, VkDeviceSize allocSize);

    void reset() {
        fSize = 0;
        fAlignment = 0;
        fFreeSize = 0;
        fLargestBlockSize = 0;
        fFreeList.reset();
    }

    struct Block {
        VkDeviceSize fOffset;
        VkDeviceSize fSize;
    };
    typedef SkTLList<Block, 16> FreeList;

    VkDeviceSize   fSize;
    VkDeviceSize   fAlignment;
    VkDeviceSize   fFreeSize;
    VkDeviceSize   fLargestBlockSize;
    VkDeviceSize   fLargestBlockOffset;
    FreeList       fFreeList;
};

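// GrVkFreeListAlloc tracks the range [0, fSize) as a list of free Blocks; a subclass
// such as GrVkSubHeap (below) drives the protected interface roughly like the sketch
// here. This is illustrative only: the actual block splitting/merging lives in
// GrVkMemory.cpp, and "requestedSize" is just a placeholder name.
//
//     VkDeviceSize offset, actualSize;
//     if (this->alloc(requestedSize, &offset, &actualSize)) {
//         // [offset, offset + actualSize) is now carved out of the managed range,
//         // and fFreeSize / fLargestBlockSize reflect the remaining free space.
//     }
//     // ... later, return the block to the free list:
//     this->free(offset, actualSize);
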
class GrVkSubHeap : public GrVkFreeListAlloc {
public:
    GrVkSubHeap(const GrVkGpu* gpu, uint32_t memoryTypeIndex, uint32_t heapIndex,
                VkDeviceSize size, VkDeviceSize alignment);
    ~GrVkSubHeap();

    uint32_t memoryTypeIndex() const { return fMemoryTypeIndex; }
    VkDeviceMemory memory() { return fAlloc; }

    bool alloc(VkDeviceSize requestedSize, GrVkAlloc* alloc);
    void free(const GrVkAlloc& alloc);

private:
    const GrVkGpu* fGpu;
#ifdef SK_DEBUG
    uint32_t       fHeapIndex;
#endif
    uint32_t       fMemoryTypeIndex;
    VkDeviceMemory fAlloc;

    typedef GrVkFreeListAlloc INHERITED;
};

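// A GrVkSubHeap owns a single VkDeviceMemory allocation of one memory type and hands
// out aligned sub-ranges of it. GrVkHeap (below) drives sub-heaps roughly like the
// sketch here; "gpu", the indices, and the sizes are placeholders, not values Skia
// actually configures.
//
//     GrVkSubHeap subHeap(gpu, memoryTypeIndex, heapIndex,
//                         64 * 1024 /* size */, 256 /* alignment */);
//     GrVkAlloc alloc;
//     if (subHeap.alloc(requestedSize, &alloc)) {
//         // alloc.fMemory is subHeap.memory(); alloc.fOffset is the sub-range's
//         // offset within that single VkDeviceMemory allocation.
//     }
//     // ... later:
//     subHeap.free(alloc);
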
class GrVkHeap {
public:
    enum Strategy {
        kSubAlloc_Strategy,    // alloc large subheaps and suballoc within them
        kSingleAlloc_Strategy  // alloc/recycle an individual subheap per object
    };

    GrVkHeap(const GrVkGpu* gpu, Strategy strategy, VkDeviceSize subHeapSize)
        : fGpu(gpu)
        , fSubHeapSize(subHeapSize)
        , fAllocSize(0)
        , fUsedSize(0) {
        if (strategy == kSubAlloc_Strategy) {
            fAllocFunc = &GrVkHeap::subAlloc;
        } else {
            fAllocFunc = &GrVkHeap::singleAlloc;
        }
    }

    ~GrVkHeap() {}

    VkDeviceSize allocSize() const { return fAllocSize; }
    VkDeviceSize usedSize() const { return fUsedSize; }

    bool alloc(VkDeviceSize size, VkDeviceSize alignment, uint32_t memoryTypeIndex,
               uint32_t heapIndex, GrVkAlloc* alloc) {
        SkASSERT(size > 0);
        return (*this.*fAllocFunc)(size, alignment, memoryTypeIndex, heapIndex, alloc);
    }
    bool free(const GrVkAlloc& alloc);

private:
    typedef bool (GrVkHeap::*AllocFunc)(VkDeviceSize size, VkDeviceSize alignment,
                                        uint32_t memoryTypeIndex, uint32_t heapIndex,
                                        GrVkAlloc* alloc);

    bool subAlloc(VkDeviceSize size, VkDeviceSize alignment,
                  uint32_t memoryTypeIndex, uint32_t heapIndex,
                  GrVkAlloc* alloc);
    bool singleAlloc(VkDeviceSize size, VkDeviceSize alignment,
                     uint32_t memoryTypeIndex, uint32_t heapIndex,
                     GrVkAlloc* alloc);

    const GrVkGpu*                          fGpu;
    VkDeviceSize                            fSubHeapSize;
    VkDeviceSize                            fAllocSize;
    VkDeviceSize                            fUsedSize;
    AllocFunc                               fAllocFunc;
    SkTArray<std::unique_ptr<GrVkSubHeap>>  fSubHeaps;
};
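
// Illustrative sketch of the two strategies (sizes and indices are placeholders, not
// the values Skia configures). In real use, memoryTypeIndex/heapIndex are derived
// from vkGetBufferMemoryRequirements / vkGetImageMemoryRequirements together with
// vkGetPhysicalDeviceMemoryProperties.
//
//     // Many small resources share large sub-heaps:
//     GrVkHeap suballocHeap(gpu, GrVkHeap::kSubAlloc_Strategy, 8 * 1024 * 1024);
//     // Each resource gets (and recycles) its own dedicated sub-heap:
//     GrVkHeap singleHeap(gpu, GrVkHeap::kSingleAlloc_Strategy, 64 * 1024 * 1024);
//
//     GrVkAlloc alloc;
//     if (suballocHeap.alloc(size, alignment, memoryTypeIndex, heapIndex, &alloc)) {
//         // use alloc.fMemory / alloc.fOffset, then return it with:
//         suballocHeap.free(alloc);
//     }
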
#endif