blob: a99134b7446b33d17ad9ba7bf152e8aa74200a22 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrVkGpu.h"
9#include "GrVkImage.h"
10#include "GrVkMemory.h"
11#include "GrVkUtil.h"
12
13#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)
14
Greg Daniel6ddbafc2018-05-24 12:34:29 -040015VkPipelineStageFlags GrVkImage::LayoutToPipelineStageFlags(const VkImageLayout layout) {
16 if (VK_IMAGE_LAYOUT_GENERAL == layout) {
17 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
18 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
19 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
20 return VK_PIPELINE_STAGE_TRANSFER_BIT;
21 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout ||
22 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout ||
23 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout ||
24 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
25 return VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
26 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
27 return VK_PIPELINE_STAGE_HOST_BIT;
28 }
29
30 SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout);
31 return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
32}
33
34VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) {
35 // Currently we assume we will never being doing any explict shader writes (this doesn't include
36 // color attachment or depth/stencil writes). So we will ignore the
37 // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT.
38
39 // We can only directly access the host memory if we are in preinitialized or general layout,
40 // and the image is linear.
41 // TODO: Add check for linear here so we are not always adding host to general, and we should
42 // only be in preinitialized if we are linear
43 VkAccessFlags flags = 0;;
44 if (VK_IMAGE_LAYOUT_GENERAL == layout) {
45 flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
46 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
47 VK_ACCESS_TRANSFER_WRITE_BIT |
48 VK_ACCESS_TRANSFER_READ_BIT |
49 VK_ACCESS_SHADER_READ_BIT |
50 VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT;
51 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
52 flags = VK_ACCESS_HOST_WRITE_BIT;
53 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
54 flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
55 } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) {
56 flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
57 } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
58 flags = VK_ACCESS_TRANSFER_WRITE_BIT;
59 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout) {
60 flags = VK_ACCESS_TRANSFER_READ_BIT;
61 } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
62 flags = VK_ACCESS_SHADER_READ_BIT;
63 }
64 return flags;
65}
66
egdaniel58a8d922016-04-21 08:03:10 -070067VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) {
68 switch (format) {
69 case VK_FORMAT_S8_UINT:
70 return VK_IMAGE_ASPECT_STENCIL_BIT;
71 case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough
72 case VK_FORMAT_D32_SFLOAT_S8_UINT:
73 return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
74 default:
Greg Daniel81b80592017-12-13 10:20:04 -050075 SkASSERT(GrVkFormatIsSupported(format));
egdaniel58a8d922016-04-21 08:03:10 -070076 return VK_IMAGE_ASPECT_COLOR_BIT;
77 }
78}
79
// Records an image memory barrier transitioning this image from its cached
// current layout to newLayout, then updates the cached layout. The source
// access/stage masks are derived from the current layout; the destination
// masks are supplied by the caller. byRegion is forwarded to the barrier.
void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout,
                               VkAccessFlags dstAccessMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion) {
    // Callers may not transition *to* UNDEFINED or PREINITIALIZED; those are
    // only valid as initial layouts.
    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
             VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout);
    VkImageLayout currentLayout = this->currentLayout();

    // If the old and new layout are the same and the layout is a read only layout, there is no need
    // to put in a barrier.
    if (newLayout == currentLayout &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    // Source-side masks describe what must be made available from the layout
    // we are leaving.
    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);
    // Positional aggregate init: field order must match VkImageMemoryBarrier.
    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,     // sType
        nullptr,                                    // pNext
        srcAccessMask,                              // outputMask
        dstAccessMask,                              // inputMask
        currentLayout,                              // oldLayout
        newLayout,                                  // newLayout
        VK_QUEUE_FAMILY_IGNORED,                    // srcQueueFamilyIndex
        VK_QUEUE_FAMILY_IGNORED,                    // dstQueueFamilyIndex
        fInfo.fImage,                               // image
        // All mip levels, single array layer.
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 } // subresourceRange
    };

    gpu->addImageMemoryBarrier(srcStageMask, dstStageMask, byRegion, &imageMemoryBarrier);

    // Remember the layout the image will be in once the barrier executes.
    this->updateImageLayout(newLayout);
}
118
// Creates a VkImage per imageDesc, allocates and binds memory for it, and
// fills in *info on success. Returns false (leaving *info untouched) on
// invalid dimensions, unsupported sample count, or allocation failure.
bool GrVkImage::InitImageInfo(const GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) {
    // Zero-sized images are invalid in Vulkan; reject early.
    if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) {
        return false;
    }
    VkImage image = 0;
    GrVkAlloc alloc;

    // Linear-tiled images start PREINITIALIZED so host-written contents are
    // preserved; optimal-tiled images start UNDEFINED.
    bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling;
    VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED
                                           : VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) {
        return false;
    }

    // Multisampled images must use optimal tiling.
    SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling ||
             VK_SAMPLE_COUNT_1_BIT == vkSamples);

    // Positional aggregate init: field order must match VkImageCreateInfo.
    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        nullptr,                                     // pNext
        0,                                           // VkImageCreateFlags
        imageDesc.fImageType,                        // VkImageType
        imageDesc.fFormat,                           // VkFormat
        { imageDesc.fWidth, imageDesc.fHeight, 1 },  // VkExtent3D
        imageDesc.fLevels,                           // mipLevels
        1,                                           // arrayLayers
        vkSamples,                                   // samples
        imageDesc.fImageTiling,                      // VkImageTiling
        imageDesc.fUsageFlags,                       // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        0,                                           // pQueueFamilyIndices
        initialLayout                                // initialLayout
    };

    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr,
                                                        &image));

    // If memory allocation/binding fails, destroy the image so we don't leak it.
    if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, isLinear, &alloc)) {
        VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr));
        return false;
    }

    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = imageDesc.fImageTiling;
    info->fImageLayout = initialLayout;
    info->fFormat = imageDesc.fFormat;
    info->fLevelCount = imageDesc.fLevels;
    return true;
}
Greg Daniel164a9f02016-02-22 09:56:40 -0500173
egdanielb2df0c22016-05-13 11:30:37 -0700174void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) {
175 VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr));
jvanverth6b6ffc42016-06-13 14:28:07 -0700176 bool isLinear = VK_IMAGE_TILING_LINEAR == info->fImageTiling;
177 GrVkMemory::FreeImageMemory(gpu, isLinear, info->fAlloc);
egdanielb2df0c22016-05-13 11:30:37 -0700178}
179
// Wraps the given image/allocation/tiling in a new ref-counted Resource and
// takes ownership of it. Any previous fResource must already have been
// released or abandoned by the caller.
void GrVkImage::setNewResource(VkImage image, const GrVkAlloc& alloc, VkImageTiling tiling) {
    fResource = new Resource(image, alloc, tiling);
}
183
GrVkImage::~GrVkImage() {
    // should have been released or abandoned first
    SkASSERT(!fResource);
}
188
189void GrVkImage::releaseImage(const GrVkGpu* gpu) {
190 if (fResource) {
191 fResource->unref(gpu);
192 fResource = nullptr;
193 }
194}
195
196void GrVkImage::abandonImage() {
197 if (fResource) {
198 fResource->unrefAndAbandon();
199 fResource = nullptr;
200 }
201}
202
// Registers a proc to be invoked when the underlying resource is released.
void GrVkImage::setResourceRelease(sk_sp<GrReleaseProcHelper> releaseHelper) {
    // Forward the release proc on to GrVkImage::Resource
    fResource->setRelease(std::move(releaseHelper));
}
207
Greg Daniel164a9f02016-02-22 09:56:40 -0500208void GrVkImage::Resource::freeGPUData(const GrVkGpu* gpu) const {
Greg Daniel6a0176b2018-01-30 09:28:44 -0500209 SkASSERT(!fReleaseHelper);
Greg Daniel164a9f02016-02-22 09:56:40 -0500210 VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr));
jvanverth6b6ffc42016-06-13 14:28:07 -0700211 bool isLinear = (VK_IMAGE_TILING_LINEAR == fImageTiling);
212 GrVkMemory::FreeImageMemory(gpu, isLinear, fAlloc);
jvanverth0fcfb752016-03-09 09:57:52 -0800213}
jvanverthfe170d22016-03-22 13:15:44 -0700214
// Borrowed resources do not own the VkImage/memory, so freeing only fires the
// client's release proc.
void GrVkImage::BorrowedResource::freeGPUData(const GrVkGpu* gpu) const {
    this->invokeReleaseProc();
}
Greg Danielcef213c2017-04-21 11:52:27 -0400218
// On abandon, likewise only notify the client; nothing is owned to destroy.
void GrVkImage::BorrowedResource::abandonGPUData() const {
    this->invokeReleaseProc();
}
222