/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkImage.h"
#include "GrGpuResourcePriv.h"
#include "GrVkGpu.h"
#include "GrVkMemory.h"
#include "GrVkTexture.h"
#include "GrVkUtil.h"

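// VK_CALL dispatches Vulkan calls through the GrVkGpu's GrVkInterface function table, so
// the whole backend goes through one set of resolved function pointers.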
#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

VkPipelineStageFlags GrVkImage::LayoutToPipelineSrcStageFlags(const VkImageLayout layout) {
    if (VK_IMAGE_LAYOUT_GENERAL == layout) {
        return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_TRANSFER_BIT;
    } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
    } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
        return VK_PIPELINE_STAGE_HOST_BIT;
    }

    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout);
    return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
}

VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) {
    // Currently we assume we will never be doing any explicit shader writes (this doesn't include
    // color attachment or depth/stencil writes), so we ignore VK_ACCESS_SHADER_WRITE_BIT.

    // We can only directly access the host memory if we are in preinitialized or general layout,
    // and the image is linear.
    // TODO: Add a check for linear here so we are not always adding host to general, and we
    // should only be in preinitialized if we are linear.
    VkAccessFlags flags = 0;
    if (VK_IMAGE_LAYOUT_GENERAL == layout) {
        flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                VK_ACCESS_TRANSFER_WRITE_BIT |
                VK_ACCESS_TRANSFER_READ_BIT |
                VK_ACCESS_SHADER_READ_BIT |
                VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
        flags = VK_ACCESS_HOST_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
        flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) {
        flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
        flags = VK_ACCESS_TRANSFER_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout) {
        flags = VK_ACCESS_TRANSFER_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
        flags = VK_ACCESS_SHADER_READ_BIT;
    }
    return flags;
}
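
// The two helpers above supply the "source" half of an image barrier for a given current
// layout: the pipeline stage to wait on and the prior writes to make visible. A minimal
// sketch of how a caller could combine them (setImageLayout below does exactly this, plus
// queue-family bookkeeping):
//
//   VkAccessFlags srcAccess = GrVkImage::LayoutToSrcAccessMask(oldLayout);
//   VkPipelineStageFlags srcStage = GrVkImage::LayoutToPipelineSrcStageFlags(oldLayout);
//   // ...fill a VkImageMemoryBarrier with srcAccess/oldLayout, then record it with
//   // srcStage as the barrier's source stage mask.
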
70
egdaniel58a8d922016-04-21 08:03:10 -070071VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) {
72 switch (format) {
73 case VK_FORMAT_S8_UINT:
74 return VK_IMAGE_ASPECT_STENCIL_BIT;
75 case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough
76 case VK_FORMAT_D32_SFLOAT_S8_UINT:
77 return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
78 default:
Greg Daniel81b80592017-12-13 10:20:04 -050079 SkASSERT(GrVkFormatIsSupported(format));
egdaniel58a8d922016-04-21 08:03:10 -070080 return VK_IMAGE_ASPECT_COLOR_BIT;
81 }
82}
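
// Every color format Skia supports falls through to the default branch here (e.g.
// VK_FORMAT_R8G8B8A8_UNORM maps to VK_IMAGE_ASPECT_COLOR_BIT); only the depth/stencil
// formats listed above need the explicit cases.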

void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout,
                               VkAccessFlags dstAccessMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion, bool releaseFamilyQueue) {
    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
             VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout);
    VkImageLayout currentLayout = this->currentLayout();

    if (releaseFamilyQueue && fInfo.fCurrentQueueFamily == fInitialQueueFamily) {
        // We never transferred the image to this queue, and we are releasing it, so don't do
        // anything.
        return;
    }

    // If the old and new layouts are the same and the layout is read-only, there is no need to
    // put in a barrier.
    if (newLayout == currentLayout &&
        !releaseFamilyQueue &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineSrcStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);

    uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    if (fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_IGNORED &&
        gpu->queueIndex() != fInfo.fCurrentQueueFamily) {
        // The image is still owned by its original queue family, and we need to transfer it into
        // ours.
        SkASSERT(!releaseFamilyQueue);
        SkASSERT(fInfo.fCurrentQueueFamily == fInitialQueueFamily);

        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = gpu->queueIndex();
        fInfo.fCurrentQueueFamily = gpu->queueIndex();
    } else if (releaseFamilyQueue) {
        // We are releasing the image, so we must transfer it back to its original queue family.
        SkASSERT(fInfo.fCurrentQueueFamily == gpu->queueIndex());
        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = fInitialQueueFamily;
        fInfo.fCurrentQueueFamily = fInitialQueueFamily;
    }

    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,      // sType
        nullptr,                                     // pNext
        srcAccessMask,                               // srcAccessMask
        dstAccessMask,                               // dstAccessMask
        currentLayout,                               // oldLayout
        newLayout,                                   // newLayout
        srcQueueFamilyIndex,                         // srcQueueFamilyIndex
        dstQueueFamilyIndex,                         // dstQueueFamilyIndex
        fInfo.fImage,                                // image
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 }  // subresourceRange
    };

    gpu->addImageMemoryBarrier(this->resource(), srcStageMask, dstStageMask, byRegion,
                               &imageMemoryBarrier);

    this->updateImageLayout(newLayout);
}
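
// A usage sketch (the values mirror what Skia's texture code typically passes, but are
// illustrative, not prescriptive): transitioning an image so a fragment shader can sample
// it could look like
//
//   image->setImageLayout(gpu,
//                         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
//                         VK_ACCESS_SHADER_READ_BIT,
//                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
//                         false,    // byRegion
//                         false);   // releaseFamilyQueue
//
// The source access mask and stage are derived internally from the tracked current layout.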

bool GrVkImage::InitImageInfo(const GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) {
    if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) {
        return false;
    }
    VkImage image = VK_NULL_HANDLE;
    GrVkAlloc alloc;

    bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling;
    VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED
                                           : VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) {
        return false;
    }

    SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling ||
             VK_SAMPLE_COUNT_1_BIT == vkSamples);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        nullptr,                                     // pNext
        0,                                           // VkImageCreateFlags
        imageDesc.fImageType,                        // VkImageType
        imageDesc.fFormat,                           // VkFormat
        { imageDesc.fWidth, imageDesc.fHeight, 1 },  // VkExtent3D
        imageDesc.fLevels,                           // mipLevels
        1,                                           // arrayLayers
        vkSamples,                                   // samples
        imageDesc.fImageTiling,                      // VkImageTiling
        imageDesc.fUsageFlags,                       // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyIndexCount
        nullptr,                                     // pQueueFamilyIndices
        initialLayout                                // initialLayout
    };

    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr,
                                                        &image));

    if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, isLinear, &alloc)) {
        VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr));
        return false;
    }

    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = imageDesc.fImageTiling;
    info->fImageLayout = initialLayout;
    info->fFormat = imageDesc.fFormat;
    info->fLevelCount = imageDesc.fLevels;
    info->fCurrentQueueFamily = VK_QUEUE_FAMILY_IGNORED;
    return true;
}
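
// A minimal create/destroy round trip with the helpers above and below (a sketch; the
// field values are illustrative, and ImageDesc members not set here keep their defaults):
//
//   GrVkImage::ImageDesc desc;
//   desc.fImageType   = VK_IMAGE_TYPE_2D;
//   desc.fFormat      = VK_FORMAT_R8G8B8A8_UNORM;
//   desc.fWidth       = 256;
//   desc.fHeight      = 256;
//   desc.fLevels      = 1;
//   desc.fSamples     = 1;
//   desc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
//   desc.fUsageFlags  = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
//
//   GrVkImageInfo info;
//   if (GrVkImage::InitImageInfo(gpu, desc, &info)) {
//       // ... use info.fImage ...
//       GrVkImage::DestroyImageInfo(gpu, &info);
//   }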

void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) {
    VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr));
    bool isLinear = VK_IMAGE_TILING_LINEAR == info->fImageTiling;
    GrVkMemory::FreeImageMemory(gpu, isLinear, info->fAlloc);
}

GrVkImage::~GrVkImage() {
    // should have been released or abandoned first
    SkASSERT(!fResource);
}

void GrVkImage::releaseImage(GrVkGpu* gpu) {
    if (fInfo.fCurrentQueueFamily != fInitialQueueFamily) {
        // The Vulkan spec is vague about what to put for the dstStageMask here. The spec for
        // image memory barriers says the dstStageMask must not be zero. However, when the spec
        // talks about queue family ownership transfers, it says the dstStageMask is ignored and
        // should be set to zero. Assuming it really is ignored, we set it to
        // VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT here, since that keeps the Vulkan validation layers
        // happy.
        this->setImageLayout(gpu, this->currentLayout(), 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                             false, true);
    }
    if (fResource) {
        fResource->removeOwningTexture();
        fResource->unref(gpu);
        fResource = nullptr;
    }
}

void GrVkImage::abandonImage() {
    if (fResource) {
        fResource->removeOwningTexture();
        fResource->unrefAndAbandon();
        fResource = nullptr;
    }
}

void GrVkImage::setResourceRelease(sk_sp<GrReleaseProcHelper> releaseHelper) {
    SkASSERT(fResource);
    // Forward the release proc on to GrVkImage::Resource
    fResource->setRelease(std::move(releaseHelper));
}
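
// Note that the forwarded release proc does not run here; it is invoked when the
// underlying Resource is freed or abandoned (see freeGPUData below), i.e. once the last
// reference to the VkImage goes away.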

void GrVkImage::Resource::freeGPUData(GrVkGpu* gpu) const {
    this->invokeReleaseProc();
    VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr));
    bool isLinear = (VK_IMAGE_TILING_LINEAR == fImageTiling);
    GrVkMemory::FreeImageMemory(gpu, isLinear, fAlloc);
}

void GrVkImage::Resource::setIdleProc(GrVkTexture* owner, GrTexture::IdleProc proc,
                                      void* context) const {
    fOwningTexture = owner;
    fIdleProc = proc;
    fIdleProcContext = context;
}

void GrVkImage::Resource::removeOwningTexture() const { fOwningTexture = nullptr; }

void GrVkImage::Resource::notifyAddedToCommandBuffer() const { ++fNumCommandBufferOwners; }

void GrVkImage::Resource::notifyRemovedFromCommandBuffer() const {
    SkASSERT(fNumCommandBufferOwners);
    if (--fNumCommandBufferOwners || !fIdleProc) {
        return;
    }
    if (fOwningTexture && fOwningTexture->resourcePriv().hasRefOrPendingIO()) {
        return;
    }
    fIdleProc(fIdleProcContext);
    if (fOwningTexture) {
        fOwningTexture->setIdleProc(nullptr, nullptr);
        // Changing the texture's proc should change ours.
        SkASSERT(!fIdleProc);
        SkASSERT(!fIdleProcContext);
    } else {
        fIdleProc = nullptr;
        fIdleProcContext = nullptr;
    }
}
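
// The idle proc therefore fires only once the image has left every command buffer
// (fNumCommandBufferOwners reaches zero) and, if a texture still owns the resource, that
// texture holds no refs or pending IO. Clearing the proc through the owning texture keeps
// the texture's and the resource's bookkeeping in sync.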

void GrVkImage::BorrowedResource::freeGPUData(GrVkGpu* gpu) const {
    this->invokeReleaseProc();
}

void GrVkImage::BorrowedResource::abandonGPUData() const {
    this->invokeReleaseProc();
}

#if GR_TEST_UTILS
void GrVkImage::setCurrentQueueFamilyToGraphicsQueue(GrVkGpu* gpu) {
    fInfo.fCurrentQueueFamily = gpu->queueIndex();
}
#endif