/*
* Copyright 2015 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/

#include "GrVkMemory.h"

#include "GrVkGpu.h"
#include "GrVkUtil.h"

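// Scans the physical device's memory types (at most 32, per the Vulkan spec) and returns the
// index of the first type that is both allowed by typeBits (from VkMemoryRequirements) and
// supports every requested property flag.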
static bool get_valid_memory_type_index(VkPhysicalDeviceMemoryProperties physDevMemProps,
                                        uint32_t typeBits,
                                        VkMemoryPropertyFlags requestedMemFlags,
                                        uint32_t* typeIndex) {
    uint32_t checkBit = 1;
    for (uint32_t i = 0; i < 32; ++i) {
        if (typeBits & checkBit) {
            uint32_t supportedFlags = physDevMemProps.memoryTypes[i].propertyFlags &
                                      requestedMemFlags;
            if (supportedFlags == requestedMemFlags) {
                *typeIndex = i;
                return true;
            }
        }
        checkBit <<= 1;
    }
    return false;
}

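// Picks a compatible memory type and allocates memReqs->size bytes of device memory from it,
// returning the allocation through |memory|. Fails if no memory type matches the requested
// flags or if the allocation itself fails.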
static bool alloc_device_memory(const GrVkGpu* gpu,
                                VkMemoryRequirements* memReqs,
                                const VkMemoryPropertyFlags flags,
                                VkDeviceMemory* memory) {
    uint32_t typeIndex;
    if (!get_valid_memory_type_index(gpu->physicalDeviceMemoryProperties(),
                                     memReqs->memoryTypeBits,
                                     flags,
                                     &typeIndex)) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
        NULL,                                    // pNext
        memReqs->size,                           // allocationSize
        typeIndex,                               // memoryTypeIndex
    };

    VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateMemory(gpu->device(),
                                                                 &allocInfo,
                                                                 nullptr,
                                                                 memory));
    if (err) {
        return false;
    }
    return true;
}

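// Allocates device memory satisfying the buffer's requirements and binds it to the buffer.
// A minimal usage sketch (hypothetical caller; the buffer and the flag choice here are
// assumptions, not code from this file):
//     GrVkAlloc alloc;
//     if (GrVkMemory::AllocAndBindBufferMemory(gpu, buffer,
//                                              VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, &alloc)) {
//         // ... use the buffer ...
//         GrVkMemory::FreeBufferMemory(gpu, alloc);
//     }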
bool GrVkMemory::AllocAndBindBufferMemory(const GrVkGpu* gpu,
                                          VkBuffer buffer,
                                          const VkMemoryPropertyFlags flags,
                                          GrVkAlloc* alloc) {
    const GrVkInterface* iface = gpu->vkInterface();
    VkDevice device = gpu->device();

    VkMemoryRequirements memReqs;
    GR_VK_CALL(iface, GetBufferMemoryRequirements(device, buffer, &memReqs));

    if (!alloc_device_memory(gpu, &memReqs, flags, &alloc->fMemory)) {
        return false;
    }
    // For now, the offset is always 0.
    alloc->fOffset = 0;

    // Bind the memory to the buffer; on failure, free the allocation so it does not leak.
    VkResult err = GR_VK_CALL(iface, BindBufferMemory(device, buffer,
                                                      alloc->fMemory, alloc->fOffset));
    if (err) {
        GR_VK_CALL(iface, FreeMemory(device, alloc->fMemory, nullptr));
        return false;
    }
    return true;
}

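// Counterpart to AllocAndBindBufferMemory: frees the backing device memory. Per the Vulkan
// spec, the caller must ensure no pending GPU work still references this memory.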
void GrVkMemory::FreeBufferMemory(const GrVkGpu* gpu, const GrVkAlloc& alloc) {
    const GrVkInterface* iface = gpu->vkInterface();
    GR_VK_CALL(iface, FreeMemory(gpu->device(), alloc.fMemory, nullptr));
}

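// Image analogue of AllocAndBindBufferMemory: queries the image's memory requirements,
// allocates matching device memory, and binds it at offset 0.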
bool GrVkMemory::AllocAndBindImageMemory(const GrVkGpu* gpu,
                                         VkImage image,
                                         const VkMemoryPropertyFlags flags,
                                         GrVkAlloc* alloc) {
    const GrVkInterface* iface = gpu->vkInterface();
    VkDevice device = gpu->device();

    VkMemoryRequirements memReqs;
    GR_VK_CALL(iface, GetImageMemoryRequirements(device, image, &memReqs));

    if (!alloc_device_memory(gpu, &memReqs, flags, &alloc->fMemory)) {
        return false;
    }
    // For now, the offset is always 0.
    alloc->fOffset = 0;

    // Bind the memory to the image; on failure, free the allocation so it does not leak.
    VkResult err = GR_VK_CALL(iface, BindImageMemory(device, image,
                                                     alloc->fMemory, alloc->fOffset));
    if (err) {
        GR_VK_CALL(iface, FreeMemory(device, alloc->fMemory, nullptr));
        return false;
    }
    return true;
}

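// Counterpart to AllocAndBindImageMemory: frees the image's backing device memory.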
void GrVkMemory::FreeImageMemory(const GrVkGpu* gpu, const GrVkAlloc& alloc) {
    const GrVkInterface* iface = gpu->vkInterface();
    GR_VK_CALL(iface, FreeMemory(gpu->device(), alloc.fMemory, nullptr));
}

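// Maps an image layout to a conservative set of pipeline stages that may access an image in
// that layout, suitable as a stage mask when building a barrier around a layout transition.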
VkPipelineStageFlags GrVkMemory::LayoutToPipelineStageFlags(const VkImageLayout layout) {
    if (VK_IMAGE_LAYOUT_GENERAL == layout) {
        return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_TRANSFER_BIT;
    } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
    } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
        return VK_PIPELINE_STAGE_HOST_BIT;
    }

    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout);
    return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
}

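// Maps an image layout to the access types whose prior writes must be made available when
// transitioning an image out of that layout, i.e. a srcAccessMask for an image barrier.
// A hypothetical sketch of how the two Layout* helpers combine (the barrier setup and
// oldLayout are assumptions, not code from this file):
//     VkImageMemoryBarrier barrier;
//     barrier.srcAccessMask = GrVkMemory::LayoutToSrcAccessMask(oldLayout);
//     VkPipelineStageFlags srcStages = GrVkMemory::LayoutToPipelineStageFlags(oldLayout);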
VkAccessFlags GrVkMemory::LayoutToSrcAccessMask(const VkImageLayout layout) {
    // Currently we assume we will never be doing any explicit shader writes (this doesn't
    // include color attachment or depth/stencil writes). So we will ignore the
    // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT.

    // We can only directly access the host memory if we are in preinitialized or general layout,
    // and the image is linear.
    // TODO: Add a check for linear here so we are not always adding host to general; also, we
    // should only be in preinitialized if we are linear.
    VkAccessFlags flags = 0;
    if (VK_IMAGE_LAYOUT_GENERAL == layout) {
        flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                VK_ACCESS_TRANSFER_WRITE_BIT |
                VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
        flags = VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
        flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) {
        flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
        flags = VK_ACCESS_TRANSFER_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout) {
        flags = VK_ACCESS_TRANSFER_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
        flags = VK_ACCESS_SHADER_READ_BIT;
    }
    return flags;
}