/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkMemory_DEFINED
#define GrVkMemory_DEFINED

#include "GrVkBuffer.h"
#include "SkTArray.h"
#include "SkTLList.h"
#include "vk/GrVkDefines.h"
#include "vk/GrVkTypes.h"

class GrVkGpu;

namespace GrVkMemory {
    /**
     * Allocates vulkan device memory and binds it to the gpu's device for the given object.
     * Returns true if allocation succeeded.
     *
     * @param type     pool/type of buffer being backed (see GrVkBuffer::Type).
     * @param dynamic  true for frequently-updated (CPU-written) buffers; presumably selects
     *                 host-visible memory — confirm in GrVkMemory.cpp.
     * @param alloc    out-param describing the resulting memory block.
     */
    bool AllocAndBindBufferMemory(const GrVkGpu* gpu,
                                  VkBuffer buffer,
                                  GrVkBuffer::Type type,
                                  bool dynamic,
                                  GrVkAlloc* alloc);
    // Returns buffer memory previously obtained from AllocAndBindBufferMemory. The same
    // `type` used at allocation time must be passed so it goes back to the right pool.
    void FreeBufferMemory(const GrVkGpu* gpu, GrVkBuffer::Type type, const GrVkAlloc& alloc);

    /**
     * Allocates vulkan device memory and binds it to the gpu's device for the given image.
     * `linearTiling` must match the tiling the image was created with, since linear and
     * optimal images are sub-allocated from separate heaps.
     */
    bool AllocAndBindImageMemory(const GrVkGpu* gpu,
                                 VkImage image,
                                 bool linearTiling,
                                 GrVkAlloc* alloc);
    // Returns image memory previously obtained from AllocAndBindImageMemory; `linearTiling`
    // must match the value used at allocation time.
    void FreeImageMemory(const GrVkGpu* gpu, bool linearTiling, const GrVkAlloc& alloc);

    // Maps an image layout to the pipeline stages that may access an image in that layout
    // (used when building image memory barriers).
    VkPipelineStageFlags LayoutToPipelineStageFlags(const VkImageLayout layout);

    // Maps an image layout to the access mask for the source half of a layout transition.
    VkAccessFlags LayoutToSrcAccessMask(const VkImageLayout layout);

    // Flush/invalidate a mapped allocation so CPU writes become visible to the GPU and
    // vice versa. Presumably no-ops for host-coherent memory — confirm in GrVkMemory.cpp.
    void FlushMappedAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc);
    void InvalidateMappedAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc);
}
| 44 | |
jvanverth | 82356cc | 2016-07-07 07:16:42 -0700 | [diff] [blame] | 45 | class GrVkFreeListAlloc { |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 46 | public: |
jvanverth | 82356cc | 2016-07-07 07:16:42 -0700 | [diff] [blame] | 47 | GrVkFreeListAlloc(VkDeviceSize size, VkDeviceSize alignment) |
| 48 | : fSize(size) |
| 49 | , fAlignment(alignment) |
| 50 | , fFreeSize(size) |
| 51 | , fLargestBlockSize(size) |
| 52 | , fLargestBlockOffset(0) { |
| 53 | Block* block = fFreeList.addToTail(); |
| 54 | block->fOffset = 0; |
| 55 | block->fSize = fSize; |
| 56 | } |
| 57 | ~GrVkFreeListAlloc() { |
| 58 | this->reset(); |
| 59 | } |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 60 | |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 61 | VkDeviceSize size() const { return fSize; } |
| 62 | VkDeviceSize alignment() const { return fAlignment; } |
| 63 | VkDeviceSize freeSize() const { return fFreeSize; } |
| 64 | VkDeviceSize largestBlockSize() const { return fLargestBlockSize; } |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 65 | |
| 66 | bool unallocated() const { return fSize == fFreeSize; } |
| 67 | |
jvanverth | 82356cc | 2016-07-07 07:16:42 -0700 | [diff] [blame] | 68 | protected: |
| 69 | bool alloc(VkDeviceSize requestedSize, VkDeviceSize* allocOffset, VkDeviceSize* allocSize); |
| 70 | void free(VkDeviceSize allocOffset, VkDeviceSize allocSize); |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 71 | |
jvanverth | 82356cc | 2016-07-07 07:16:42 -0700 | [diff] [blame] | 72 | void reset() { |
| 73 | fSize = 0; |
| 74 | fAlignment = 0; |
| 75 | fFreeSize = 0; |
| 76 | fLargestBlockSize = 0; |
| 77 | fFreeList.reset(); |
| 78 | } |
| 79 | |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 80 | struct Block { |
| 81 | VkDeviceSize fOffset; |
| 82 | VkDeviceSize fSize; |
| 83 | }; |
| 84 | typedef SkTLList<Block, 16> FreeList; |
| 85 | |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 86 | VkDeviceSize fSize; |
| 87 | VkDeviceSize fAlignment; |
| 88 | VkDeviceSize fFreeSize; |
| 89 | VkDeviceSize fLargestBlockSize; |
| 90 | VkDeviceSize fLargestBlockOffset; |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 91 | FreeList fFreeList; |
| 92 | }; |
| 93 | |
/**
 * A single VkDeviceMemory allocation managed with the free-list allocator above.
 * Objects are sub-allocated out of this one device-memory block.
 */
class GrVkSubHeap : public GrVkFreeListAlloc {
public:
    // `memoryTypeIndex`/`heapIndex` identify the Vulkan memory type this subheap
    // was allocated from; `size`/`alignment` configure the free-list range.
    GrVkSubHeap(const GrVkGpu* gpu, uint32_t memoryTypeIndex, uint32_t heapIndex,
                VkDeviceSize size, VkDeviceSize alignment);
    ~GrVkSubHeap();

    uint32_t memoryTypeIndex() const { return fMemoryTypeIndex; }
    VkDeviceMemory memory() { return fAlloc; }

    // Sub-allocates `requestedSize` bytes; on success fills `alloc` (including the
    // backing VkDeviceMemory handle) and returns true.
    bool alloc(VkDeviceSize requestedSize, GrVkAlloc* alloc);
    // Returns a sub-allocation to this subheap's free list.
    void free(const GrVkAlloc& alloc);

private:
    const GrVkGpu* fGpu;
#ifdef SK_DEBUG
    uint32_t       fHeapIndex;       // debug-only: which VkMemoryHeap this came from
#endif
    uint32_t       fMemoryTypeIndex;
    VkDeviceMemory fAlloc;           // the underlying device-memory allocation

    typedef GrVkFreeListAlloc INHERITED;
};
| 116 | |
/**
 * A collection of GrVkSubHeaps for one usage class. Depending on the strategy,
 * either sub-allocates many objects out of large subheaps or dedicates one
 * (recyclable) subheap per object.
 */
class GrVkHeap {
public:
    enum Strategy {
        kSubAlloc_Strategy,       // alloc large subheaps and suballoc within them
        kSingleAlloc_Strategy     // alloc/recycle an individual subheap per object
    };

    GrVkHeap(const GrVkGpu* gpu, Strategy strategy, VkDeviceSize subHeapSize)
        : fGpu(gpu)
        , fSubHeapSize(subHeapSize)
        , fAllocSize(0)
        , fUsedSize(0) {
        // Bind the allocation policy once at construction via member-fn pointer.
        if (strategy == kSubAlloc_Strategy) {
            fAllocFunc = &GrVkHeap::subAlloc;
        } else {
            fAllocFunc = &GrVkHeap::singleAlloc;
        }
    }

    ~GrVkHeap() {}

    VkDeviceSize allocSize() const { return fAllocSize; }
    VkDeviceSize usedSize() const { return fUsedSize; }

    // Allocates `size` bytes with the given alignment from the chosen Vulkan memory
    // type/heap, dispatching to subAlloc or singleAlloc per the strategy.
    bool alloc(VkDeviceSize size, VkDeviceSize alignment, uint32_t memoryTypeIndex,
               uint32_t heapIndex, GrVkAlloc* alloc) {
        SkASSERT(size > 0);
        return (*this.*fAllocFunc)(size, alignment, memoryTypeIndex, heapIndex, alloc);
    }
    // Returns `alloc` to whichever subheap owns it; returns false if none does.
    bool free(const GrVkAlloc& alloc);

private:
    typedef bool (GrVkHeap::*AllocFunc)(VkDeviceSize size, VkDeviceSize alignment,
                                        uint32_t memoryTypeIndex, uint32_t heapIndex,
                                        GrVkAlloc* alloc);

    // Sub-allocate from an existing (or newly created) large subheap.
    bool subAlloc(VkDeviceSize size, VkDeviceSize alignment,
                  uint32_t memoryTypeIndex, uint32_t heapIndex,
                  GrVkAlloc* alloc);
    // Give the request its own dedicated subheap (recycled when freed).
    bool singleAlloc(VkDeviceSize size, VkDeviceSize alignment,
                     uint32_t memoryTypeIndex, uint32_t heapIndex,
                     GrVkAlloc* alloc);

    const GrVkGpu*                          fGpu;
    VkDeviceSize                            fSubHeapSize; // target size for new subheaps
    VkDeviceSize                            fAllocSize;   // total device memory allocated
    VkDeviceSize                            fUsedSize;    // portion handed out to clients
    AllocFunc                               fAllocFunc;   // strategy-selected allocator
    SkTArray<std::unique_ptr<GrVkSubHeap>>  fSubHeaps;
};
jvanverth | e50f3e7 | 2016-03-28 07:03:06 -0700 | [diff] [blame] | 167 | #endif |