/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#ifndef GrVkMemory_DEFINED
#define GrVkMemory_DEFINED

#include "GrVkBuffer.h"
#include "SkTArray.h"
#include "SkTLList.h"
#include "vk/GrVkDefines.h"
#include "vk/GrVkTypes.h"

17class GrVkGpu;
18
19namespace GrVkMemory {
20 /**
21 * Allocates vulkan device memory and binds it to the gpu's device for the given object.
jvanverth6b6ffc42016-06-13 14:28:07 -070022 * Returns true if allocation succeeded.
Greg Daniel164a9f02016-02-22 09:56:40 -050023 */
24 bool AllocAndBindBufferMemory(const GrVkGpu* gpu,
25 VkBuffer buffer,
jvanverth6b6ffc42016-06-13 14:28:07 -070026 GrVkBuffer::Type type,
jvanvertha584de92016-06-30 09:10:52 -070027 bool dynamic,
jvanverth1e305ba2016-06-01 09:39:15 -070028 GrVkAlloc* alloc);
jvanverth6b6ffc42016-06-13 14:28:07 -070029 void FreeBufferMemory(const GrVkGpu* gpu, GrVkBuffer::Type type, const GrVkAlloc& alloc);
Greg Daniel164a9f02016-02-22 09:56:40 -050030
31 bool AllocAndBindImageMemory(const GrVkGpu* gpu,
32 VkImage image,
jvanverth6b6ffc42016-06-13 14:28:07 -070033 bool linearTiling,
jvanverth1e305ba2016-06-01 09:39:15 -070034 GrVkAlloc* alloc);
jvanverth6b6ffc42016-06-13 14:28:07 -070035 void FreeImageMemory(const GrVkGpu* gpu, bool linearTiling, const GrVkAlloc& alloc);
Greg Daniel164a9f02016-02-22 09:56:40 -050036
37 VkPipelineStageFlags LayoutToPipelineStageFlags(const VkImageLayout layout);
38
39 VkAccessFlags LayoutToSrcAccessMask(const VkImageLayout layout);
40}
41
jvanverth82356cc2016-07-07 07:16:42 -070042class GrVkFreeListAlloc {
jvanverth6b6ffc42016-06-13 14:28:07 -070043public:
jvanverth82356cc2016-07-07 07:16:42 -070044 GrVkFreeListAlloc(VkDeviceSize size, VkDeviceSize alignment)
45 : fSize(size)
46 , fAlignment(alignment)
47 , fFreeSize(size)
48 , fLargestBlockSize(size)
49 , fLargestBlockOffset(0) {
50 Block* block = fFreeList.addToTail();
51 block->fOffset = 0;
52 block->fSize = fSize;
53 }
54 ~GrVkFreeListAlloc() {
55 this->reset();
56 }
jvanverth6b6ffc42016-06-13 14:28:07 -070057
jvanverth6b6ffc42016-06-13 14:28:07 -070058 VkDeviceSize size() const { return fSize; }
59 VkDeviceSize alignment() const { return fAlignment; }
60 VkDeviceSize freeSize() const { return fFreeSize; }
61 VkDeviceSize largestBlockSize() const { return fLargestBlockSize; }
jvanverth6b6ffc42016-06-13 14:28:07 -070062
63 bool unallocated() const { return fSize == fFreeSize; }
64
jvanverth82356cc2016-07-07 07:16:42 -070065protected:
66 bool alloc(VkDeviceSize requestedSize, VkDeviceSize* allocOffset, VkDeviceSize* allocSize);
67 void free(VkDeviceSize allocOffset, VkDeviceSize allocSize);
jvanverth6b6ffc42016-06-13 14:28:07 -070068
jvanverth82356cc2016-07-07 07:16:42 -070069 void reset() {
70 fSize = 0;
71 fAlignment = 0;
72 fFreeSize = 0;
73 fLargestBlockSize = 0;
74 fFreeList.reset();
75 }
76
jvanverth6b6ffc42016-06-13 14:28:07 -070077 struct Block {
78 VkDeviceSize fOffset;
79 VkDeviceSize fSize;
80 };
81 typedef SkTLList<Block, 16> FreeList;
82
jvanverth6b6ffc42016-06-13 14:28:07 -070083 VkDeviceSize fSize;
84 VkDeviceSize fAlignment;
85 VkDeviceSize fFreeSize;
86 VkDeviceSize fLargestBlockSize;
87 VkDeviceSize fLargestBlockOffset;
jvanverth6b6ffc42016-06-13 14:28:07 -070088 FreeList fFreeList;
89};
90
jvanverth82356cc2016-07-07 07:16:42 -070091class GrVkSubHeap : public GrVkFreeListAlloc {
92public:
93 GrVkSubHeap(const GrVkGpu* gpu, uint32_t memoryTypeIndex,
94 VkDeviceSize size, VkDeviceSize alignment);
95 ~GrVkSubHeap();
96
97 uint32_t memoryTypeIndex() const { return fMemoryTypeIndex; }
98 VkDeviceMemory memory() { return fAlloc; }
99
100 bool alloc(VkDeviceSize requestedSize, GrVkAlloc* alloc);
101 void free(const GrVkAlloc& alloc);
102
103private:
104 const GrVkGpu* fGpu;
105 uint32_t fMemoryTypeIndex;
106 VkDeviceMemory fAlloc;
107
108 typedef GrVkFreeListAlloc INHERITED;
109};
110
jvanverth6b6ffc42016-06-13 14:28:07 -0700111class GrVkHeap {
112public:
113 enum Strategy {
114 kSubAlloc_Strategy, // alloc large subheaps and suballoc within them
115 kSingleAlloc_Strategy // alloc/recycle an individual subheap per object
116 };
117
118 GrVkHeap(const GrVkGpu* gpu, Strategy strategy, VkDeviceSize subHeapSize)
119 : fGpu(gpu)
120 , fSubHeapSize(subHeapSize)
121 , fAllocSize(0)
122 , fUsedSize(0) {
123 if (strategy == kSubAlloc_Strategy) {
124 fAllocFunc = &GrVkHeap::subAlloc;
125 } else {
126 fAllocFunc = &GrVkHeap::singleAlloc;
127 }
128 }
129
jvanverth82356cc2016-07-07 07:16:42 -0700130 ~GrVkHeap() {}
jvanverth6b6ffc42016-06-13 14:28:07 -0700131
jvanverthd6f80342016-06-16 04:42:30 -0700132 VkDeviceSize allocSize() const { return fAllocSize; }
133 VkDeviceSize usedSize() const { return fUsedSize; }
134
jvanverth6b6ffc42016-06-13 14:28:07 -0700135 bool alloc(VkDeviceSize size, VkDeviceSize alignment, uint32_t memoryTypeIndex,
136 GrVkAlloc* alloc) {
jvanverth6dc3af42016-06-16 14:05:09 -0700137 SkASSERT(size > 0);
jvanverth6b6ffc42016-06-13 14:28:07 -0700138 return (*this.*fAllocFunc)(size, alignment, memoryTypeIndex, alloc);
139 }
140 bool free(const GrVkAlloc& alloc);
141
142private:
143 typedef bool (GrVkHeap::*AllocFunc)(VkDeviceSize size, VkDeviceSize alignment,
144 uint32_t memoryTypeIndex, GrVkAlloc* alloc);
145
146 bool subAlloc(VkDeviceSize size, VkDeviceSize alignment,
147 uint32_t memoryTypeIndex, GrVkAlloc* alloc);
148 bool singleAlloc(VkDeviceSize size, VkDeviceSize alignment,
149 uint32_t memoryTypeIndex, GrVkAlloc* alloc);
150
151 const GrVkGpu* fGpu;
152 VkDeviceSize fSubHeapSize;
153 VkDeviceSize fAllocSize;
154 VkDeviceSize fUsedSize;
155 AllocFunc fAllocFunc;
156 SkTArray<SkAutoTDelete<GrVkSubHeap>> fSubHeaps;
157};
#endif