Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2015 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "GrVkGpu.h" |
| 9 | #include "GrVkImage.h" |
| 10 | #include "GrVkMemory.h" |
| 11 | #include "GrVkUtil.h" |
| 12 | |
| 13 | #define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X) |
| 14 | |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 15 | VkPipelineStageFlags GrVkImage::LayoutToPipelineStageFlags(const VkImageLayout layout) { |
| 16 | if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 17 | return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; |
| 18 | } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || |
| 19 | VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 20 | return VK_PIPELINE_STAGE_TRANSFER_BIT; |
| 21 | } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout || |
| 22 | VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout || |
| 23 | VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout || |
| 24 | VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| 25 | return VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT; |
| 26 | } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { |
| 27 | return VK_PIPELINE_STAGE_HOST_BIT; |
| 28 | } |
| 29 | |
| 30 | SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout); |
| 31 | return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; |
| 32 | } |
| 33 | |
| 34 | VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) { |
| 35 | // Currently we assume we will never being doing any explict shader writes (this doesn't include |
| 36 | // color attachment or depth/stencil writes). So we will ignore the |
| 37 | // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT. |
| 38 | |
| 39 | // We can only directly access the host memory if we are in preinitialized or general layout, |
| 40 | // and the image is linear. |
| 41 | // TODO: Add check for linear here so we are not always adding host to general, and we should |
| 42 | // only be in preinitialized if we are linear |
| 43 | VkAccessFlags flags = 0;; |
| 44 | if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 45 | flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | |
| 46 | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | |
| 47 | VK_ACCESS_TRANSFER_WRITE_BIT | |
| 48 | VK_ACCESS_TRANSFER_READ_BIT | |
| 49 | VK_ACCESS_SHADER_READ_BIT | |
| 50 | VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT; |
| 51 | } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { |
| 52 | flags = VK_ACCESS_HOST_WRITE_BIT; |
| 53 | } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) { |
| 54 | flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; |
| 55 | } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) { |
| 56 | flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; |
| 57 | } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 58 | flags = VK_ACCESS_TRANSFER_WRITE_BIT; |
| 59 | } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout) { |
| 60 | flags = VK_ACCESS_TRANSFER_READ_BIT; |
| 61 | } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| 62 | flags = VK_ACCESS_SHADER_READ_BIT; |
| 63 | } |
| 64 | return flags; |
| 65 | } |
| 66 | |
egdaniel | 58a8d92 | 2016-04-21 08:03:10 -0700 | [diff] [blame] | 67 | VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) { |
| 68 | switch (format) { |
| 69 | case VK_FORMAT_S8_UINT: |
| 70 | return VK_IMAGE_ASPECT_STENCIL_BIT; |
| 71 | case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough |
| 72 | case VK_FORMAT_D32_SFLOAT_S8_UINT: |
| 73 | return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; |
| 74 | default: |
Greg Daniel | 81b8059 | 2017-12-13 10:20:04 -0500 | [diff] [blame] | 75 | SkASSERT(GrVkFormatIsSupported(format)); |
egdaniel | 58a8d92 | 2016-04-21 08:03:10 -0700 | [diff] [blame] | 76 | return VK_IMAGE_ASPECT_COLOR_BIT; |
| 77 | } |
| 78 | } |
| 79 | |
// Records an image memory barrier that transitions this image from its tracked
// current layout to newLayout, then updates the tracked layout. The caller
// supplies the destination half of the barrier (dstAccessMask/dstStageMask);
// the source half is derived from the current layout.
void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout,
                               VkAccessFlags dstAccessMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion) {
    // UNDEFINED and PREINITIALIZED are only valid as initial layouts; we never
    // transition back into them.
    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
             VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout);
    VkImageLayout currentLayout = this->currentLayout();

    // If the old and new layout are the same and the layout is a read only layout, there is no need
    // to put in a barrier.
    if (newLayout == currentLayout &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    // Source half of the barrier is implied by the layout we are leaving.
    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);
    // The barrier covers every mip level and the single array layer of this image.
    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,          // sType
        nullptr,                                         // pNext
        srcAccessMask,                                   // outputMask
        dstAccessMask,                                   // inputMask
        currentLayout,                                   // oldLayout
        newLayout,                                       // newLayout
        VK_QUEUE_FAMILY_IGNORED,                         // srcQueueFamilyIndex
        VK_QUEUE_FAMILY_IGNORED,                         // dstQueueFamilyIndex
        fInfo.fImage,                                    // image
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 }      // subresourceRange
    };

    gpu->addImageMemoryBarrier(srcStageMask, dstStageMask, byRegion, &imageMemoryBarrier);

    // Remember the new layout so the next transition uses it as "old".
    this->updateImageLayout(newLayout);
}
| 118 | |
// Creates a VkImage described by imageDesc, allocates and binds memory for it,
// and fills *info with the resulting handles. Returns false (leaving *info
// untouched) if the desc is invalid or any Vulkan step fails.
bool GrVkImage::InitImageInfo(const GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) {
    // Zero-sized images are invalid.
    if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) {
        return false;
    }
    VkImage image = 0;
    GrVkAlloc alloc;

    // Linear-tiled images start PREINITIALIZED (host-written contents are
    // preserved across the first transition); optimal-tiled start UNDEFINED.
    bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling;
    VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED
                                           : VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) {
        return false;
    }

    // Multisampling is only used with optimal tiling.
    SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling ||
             VK_SAMPLE_COUNT_1_BIT == vkSamples);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        nullptr,                                     // pNext
        0,                                           // VkImageCreateFlags
        imageDesc.fImageType,                        // VkImageType
        imageDesc.fFormat,                           // VkFormat
        { imageDesc.fWidth, imageDesc.fHeight, 1 },  // VkExtent3D
        imageDesc.fLevels,                           // mipLevels
        1,                                           // arrayLayers
        vkSamples,                                   // samples
        imageDesc.fImageTiling,                      // VkImageTiling
        imageDesc.fUsageFlags,                       // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        0,                                           // pQueueFamilyIndices
        initialLayout                                // initialLayout
    };

    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr,
                                                        &image));

    // If memory allocation/binding fails, destroy the image so it doesn't leak.
    if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, isLinear, &alloc)) {
        VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr));
        return false;
    }

    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = imageDesc.fImageTiling;
    info->fImageLayout = initialLayout;
    info->fFormat = imageDesc.fFormat;
    info->fLevelCount = imageDesc.fLevels;
    return true;
}
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 173 | |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 174 | void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) { |
| 175 | VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr)); |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 176 | bool isLinear = VK_IMAGE_TILING_LINEAR == info->fImageTiling; |
| 177 | GrVkMemory::FreeImageMemory(gpu, isLinear, info->fAlloc); |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 178 | } |
| 179 | |
// Wraps the given image/allocation in a new ref-counted Resource and takes
// ownership of it. NOTE(review): any previously held fResource is overwritten
// without an unref here — presumably callers release it first; confirm.
void GrVkImage::setNewResource(VkImage image, const GrVkAlloc& alloc, VkImageTiling tiling) {
    fResource = new Resource(image, alloc, tiling);
}
| 183 | |
GrVkImage::~GrVkImage() {
    // should have been released or abandoned first
    SkASSERT(!fResource);
}
| 188 | |
| 189 | void GrVkImage::releaseImage(const GrVkGpu* gpu) { |
| 190 | if (fResource) { |
| 191 | fResource->unref(gpu); |
| 192 | fResource = nullptr; |
| 193 | } |
| 194 | } |
| 195 | |
| 196 | void GrVkImage::abandonImage() { |
| 197 | if (fResource) { |
| 198 | fResource->unrefAndAbandon(); |
| 199 | fResource = nullptr; |
| 200 | } |
| 201 | } |
| 202 | |
// Attaches a release-proc helper that fires when the underlying resource goes away.
void GrVkImage::setResourceRelease(sk_sp<GrReleaseProcHelper> releaseHelper) {
    // Forward the release proc on to GrVkImage::Resource
    // NOTE(review): fResource is dereferenced unchecked — assumes a resource is
    // held when this is called; confirm against callers.
    fResource->setRelease(std::move(releaseHelper));
}
| 207 | |
// Called when the ref count hits zero: destroys the owned VkImage and frees its memory.
void GrVkImage::Resource::freeGPUData(const GrVkGpu* gpu) const {
    // Any release proc must have been invoked/cleared before final free.
    SkASSERT(!fReleaseHelper);
    VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr));
    bool isLinear = (VK_IMAGE_TILING_LINEAR == fImageTiling);
    GrVkMemory::FreeImageMemory(gpu, isLinear, fAlloc);
}
jvanverth | fe170d2 | 2016-03-22 13:15:44 -0700 | [diff] [blame] | 214 | |
// Borrowed resources don't own the VkImage/memory, so nothing is destroyed here;
// only the client's release proc (if any) is invoked.
void GrVkImage::BorrowedResource::freeGPUData(const GrVkGpu* gpu) const {
    this->invokeReleaseProc();
}
Greg Daniel | cef213c | 2017-04-21 11:52:27 -0400 | [diff] [blame] | 218 | |
| 219 | void GrVkImage::BorrowedResource::abandonGPUData() const { |
| 220 | this->invokeReleaseProc(); |
| 221 | } |
| 222 | |