/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkMemory_DEFINED
#define GrVkMemory_DEFINED

#include "GrVkBuffer.h"
#include "SkTArray.h"
#include "vk/GrVkTypes.h"

class GrVkGpu;

namespace GrVkMemory {
    /**
     * Allocates Vulkan device memory and binds it to the given object on the gpu's device.
     * Returns true if the allocation and binding succeeded.
     */
    bool AllocAndBindBufferMemory(const GrVkGpu* gpu,
                                  VkBuffer buffer,
                                  GrVkBuffer::Type type,
                                  bool dynamic,
                                  GrVkAlloc* alloc);
    void FreeBufferMemory(const GrVkGpu* gpu, GrVkBuffer::Type type, const GrVkAlloc& alloc);
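
    // A minimal usage sketch (illustrative only; gpu, buffer, and the chosen buffer type are
    // assumptions, not part of this header). A successful AllocAndBindBufferMemory call should
    // eventually be paired with a FreeBufferMemory call using the same buffer type:
    //
    //   GrVkAlloc alloc;
    //   if (GrVkMemory::AllocAndBindBufferMemory(gpu, buffer, GrVkBuffer::kVertex_Type,
    //                                            /*dynamic=*/true, &alloc)) {
    //       // ... use the buffer ...
    //       GrVkMemory::FreeBufferMemory(gpu, GrVkBuffer::kVertex_Type, alloc);
    //   }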

    bool AllocAndBindImageMemory(const GrVkGpu* gpu,
                                 VkImage image,
                                 bool linearTiling,
                                 GrVkAlloc* alloc);
    void FreeImageMemory(const GrVkGpu* gpu, bool linearTiling, const GrVkAlloc& alloc);

    // Maps the entire GrVkAlloc and returns a pointer to the start of the allocation. Under the
    // hood, we may map more than the range of the GrVkAlloc (e.g. the entire VkDeviceMemory), but
    // the pointer returned will always point to the start of the GrVkAlloc. The caller should
    // never assume that more than the GrVkAlloc block has been mapped.
    void* MapAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc);
    void UnmapAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc);

    // For the Flush and Invalidate calls, the offset is relative to the GrVkAlloc, so it will
    // often be 0. The client does not need to align the offset and size to the
    // nonCoherentAtomSize; the internal calls handle that.
    void FlushMappedAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc, VkDeviceSize offset,
                          VkDeviceSize size);
    void InvalidateMappedAlloc(const GrVkGpu* gpu, const GrVkAlloc& alloc, VkDeviceSize offset,
                               VkDeviceSize size);
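
    // A minimal usage sketch of the map/flush/unmap flow (illustrative only; gpu, alloc, srcData,
    // and srcSize are assumptions, not part of this header):
    //
    //   void* mapPtr = GrVkMemory::MapAlloc(gpu, alloc);       // points at the start of the alloc
    //   memcpy(mapPtr, srcData, srcSize);
    //   GrVkMemory::FlushMappedAlloc(gpu, alloc, 0, srcSize);  // offset 0 == start of the alloc
    //   GrVkMemory::UnmapAlloc(gpu, alloc);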

    // Helper for aligning and setting VkMappedMemoryRange for flushing/invalidating noncoherent
    // memory.
    void GetNonCoherentMappedMemoryRange(const GrVkAlloc&, VkDeviceSize offset, VkDeviceSize size,
                                         VkDeviceSize alignment, VkMappedMemoryRange*);
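
    // An illustrative sketch of calling this helper directly (alloc, offset, size, and
    // nonCoherentAtomSize are assumptions, not part of this header; nonCoherentAtomSize would
    // come from the device's VkPhysicalDeviceLimits):
    //
    //   VkMappedMemoryRange range;
    //   GrVkMemory::GetNonCoherentMappedMemoryRange(alloc, offset, size, nonCoherentAtomSize,
    //                                               &range);
    //   // range is now aligned to nonCoherentAtomSize and suitable for passing to
    //   // vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges.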
}

#endif