Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2015 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 8 | #include "GrVkImage.h" |
Brian Salomon | 614c1a8 | 2018-12-19 15:42:06 -0500 | [diff] [blame] | 9 | #include "GrGpuResourcePriv.h" |
| 10 | #include "GrVkGpu.h" |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 11 | #include "GrVkMemory.h" |
Brian Salomon | 614c1a8 | 2018-12-19 15:42:06 -0500 | [diff] [blame] | 12 | #include "GrVkTexture.h" |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 13 | #include "GrVkUtil.h" |
| 14 | |
| 15 | #define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X) |
| 16 | |
Greg Daniel | f7828d0 | 2018-10-09 12:01:32 -0400 | [diff] [blame] | 17 | VkPipelineStageFlags GrVkImage::LayoutToPipelineSrcStageFlags(const VkImageLayout layout) { |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 18 | if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 19 | return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; |
| 20 | } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || |
| 21 | VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 22 | return VK_PIPELINE_STAGE_TRANSFER_BIT; |
Greg Daniel | f7828d0 | 2018-10-09 12:01:32 -0400 | [diff] [blame] | 23 | } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) { |
| 24 | return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; |
| 25 | } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout || |
| 26 | VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout) { |
| 27 | return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; |
| 28 | } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| 29 | return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 30 | } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { |
| 31 | return VK_PIPELINE_STAGE_HOST_BIT; |
| 32 | } |
| 33 | |
| 34 | SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout); |
| 35 | return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; |
| 36 | } |
| 37 | |
| 38 | VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) { |
| 39 | // Currently we assume we will never being doing any explict shader writes (this doesn't include |
| 40 | // color attachment or depth/stencil writes). So we will ignore the |
| 41 | // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT. |
| 42 | |
| 43 | // We can only directly access the host memory if we are in preinitialized or general layout, |
| 44 | // and the image is linear. |
| 45 | // TODO: Add check for linear here so we are not always adding host to general, and we should |
| 46 | // only be in preinitialized if we are linear |
Brian Salomon | 2335644 | 2018-11-30 15:33:19 -0500 | [diff] [blame] | 47 | VkAccessFlags flags = 0; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 48 | if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 49 | flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | |
| 50 | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | |
| 51 | VK_ACCESS_TRANSFER_WRITE_BIT | |
| 52 | VK_ACCESS_TRANSFER_READ_BIT | |
| 53 | VK_ACCESS_SHADER_READ_BIT | |
| 54 | VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT; |
| 55 | } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { |
| 56 | flags = VK_ACCESS_HOST_WRITE_BIT; |
| 57 | } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) { |
Greg Daniel | f7828d0 | 2018-10-09 12:01:32 -0400 | [diff] [blame] | 58 | flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 59 | } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) { |
| 60 | flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; |
| 61 | } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 62 | flags = VK_ACCESS_TRANSFER_WRITE_BIT; |
| 63 | } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout) { |
| 64 | flags = VK_ACCESS_TRANSFER_READ_BIT; |
| 65 | } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| 66 | flags = VK_ACCESS_SHADER_READ_BIT; |
| 67 | } |
| 68 | return flags; |
| 69 | } |
| 70 | |
egdaniel | 58a8d92 | 2016-04-21 08:03:10 -0700 | [diff] [blame] | 71 | VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) { |
| 72 | switch (format) { |
| 73 | case VK_FORMAT_S8_UINT: |
| 74 | return VK_IMAGE_ASPECT_STENCIL_BIT; |
| 75 | case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough |
| 76 | case VK_FORMAT_D32_SFLOAT_S8_UINT: |
| 77 | return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; |
| 78 | default: |
Greg Daniel | 81b8059 | 2017-12-13 10:20:04 -0500 | [diff] [blame] | 79 | SkASSERT(GrVkFormatIsSupported(format)); |
egdaniel | 58a8d92 | 2016-04-21 08:03:10 -0700 | [diff] [blame] | 80 | return VK_IMAGE_ASPECT_COLOR_BIT; |
| 81 | } |
| 82 | } |
| 83 | |
// Records a VkImageMemoryBarrier into the gpu's command stream that transitions this
// image from its tracked current layout to newLayout, and updates the tracked layout.
// Source access/stage masks are derived from the current layout; the caller supplies
// the destination masks. The same barrier also performs a queue family ownership
// transfer when needed: acquiring the image from its original family on first use, or
// (when releaseFamilyQueue is true) returning ownership to the original family.
void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout,
                               VkAccessFlags dstAccessMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion, bool releaseFamilyQueue) {
    // UNDEFINED and PREINITIALIZED are only valid as initial layouts, never targets.
    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
             VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout);
    VkImageLayout currentLayout = this->currentLayout();

    if (releaseFamilyQueue && fInfo.fCurrentQueueFamily == fInitialQueueFamily) {
        // We never transferred the image to this queue and we are releasing it so don't
        // do anything.
        return;
    }

    // If the old and new layout are the same and the layout is a read only layout, there is no need
    // to put in a barrier.
    if (newLayout == currentLayout &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineSrcStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);

    // Decide whether this barrier also performs a queue family ownership transfer.
    // VK_QUEUE_FAMILY_IGNORED on both sides means "no transfer".
    uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    if (fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_IGNORED &&
        gpu->queueIndex() != fInfo.fCurrentQueueFamily) {
        // The image still is owned by its original queue family and we need to transfer it into
        // ours.
        SkASSERT(!releaseFamilyQueue);
        SkASSERT(fInfo.fCurrentQueueFamily == fInitialQueueFamily);

        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = gpu->queueIndex();
        fInfo.fCurrentQueueFamily = gpu->queueIndex();
    } else if (releaseFamilyQueue) {
        // We are releasing the image so we must transfer the image back to its original queue
        // family.
        SkASSERT(fInfo.fCurrentQueueFamily == gpu->queueIndex());
        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = fInitialQueueFamily;
        fInfo.fCurrentQueueFamily = fInitialQueueFamily;
    }

    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,          // sType
        nullptr,                                         // pNext
        srcAccessMask,                                   // srcAccessMask
        dstAccessMask,                                   // dstAccessMask
        currentLayout,                                   // oldLayout
        newLayout,                                       // newLayout
        srcQueueFamilyIndex,                             // srcQueueFamilyIndex
        dstQueueFamilyIndex,                             // dstQueueFamilyIndex
        fInfo.fImage,                                    // image
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 }      // subresourceRange (all mips, one layer)
    };

    gpu->addImageMemoryBarrier(srcStageMask, dstStageMask, byRegion, &imageMemoryBarrier);

    this->updateImageLayout(newLayout);
}
| 149 | |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 150 | bool GrVkImage::InitImageInfo(const GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) { |
egdaniel | ab527a5 | 2016-06-28 08:07:26 -0700 | [diff] [blame] | 151 | if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) { |
| 152 | return false; |
| 153 | } |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 154 | VkImage image = 0; |
jvanverth | 1e305ba | 2016-06-01 09:39:15 -0700 | [diff] [blame] | 155 | GrVkAlloc alloc; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 156 | |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 157 | bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling; |
| 158 | VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED |
| 159 | : VK_IMAGE_LAYOUT_UNDEFINED; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 160 | |
| 161 | // Create Image |
| 162 | VkSampleCountFlagBits vkSamples; |
| 163 | if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) { |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 164 | return false; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 165 | } |
egdaniel | 8f1dcaa | 2016-04-01 10:10:45 -0700 | [diff] [blame] | 166 | |
| 167 | SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling || |
| 168 | VK_SAMPLE_COUNT_1_BIT == vkSamples); |
| 169 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 170 | const VkImageCreateInfo imageCreateInfo = { |
| 171 | VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType |
Ben Wagner | a93a14a | 2017-08-28 10:34:05 -0400 | [diff] [blame] | 172 | nullptr, // pNext |
Brian Osman | 2b23c4b | 2018-06-01 12:25:08 -0400 | [diff] [blame] | 173 | 0, // VkImageCreateFlags |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 174 | imageDesc.fImageType, // VkImageType |
| 175 | imageDesc.fFormat, // VkFormat |
| 176 | { imageDesc.fWidth, imageDesc.fHeight, 1 }, // VkExtent3D |
| 177 | imageDesc.fLevels, // mipLevels |
| 178 | 1, // arrayLayers |
| 179 | vkSamples, // samples |
| 180 | imageDesc.fImageTiling, // VkImageTiling |
| 181 | imageDesc.fUsageFlags, // VkImageUsageFlags |
| 182 | VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode |
| 183 | 0, // queueFamilyCount |
| 184 | 0, // pQueueFamilyIndices |
| 185 | initialLayout // initialLayout |
| 186 | }; |
| 187 | |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 188 | GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr, |
| 189 | &image)); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 190 | |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 191 | if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, isLinear, &alloc)) { |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 192 | VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr)); |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 193 | return false; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 194 | } |
| 195 | |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 196 | info->fImage = image; |
| 197 | info->fAlloc = alloc; |
| 198 | info->fImageTiling = imageDesc.fImageTiling; |
| 199 | info->fImageLayout = initialLayout; |
| 200 | info->fFormat = imageDesc.fFormat; |
| 201 | info->fLevelCount = imageDesc.fLevels; |
Greg Daniel | ecddbc0 | 2018-08-30 16:39:34 -0400 | [diff] [blame] | 202 | info->fCurrentQueueFamily = VK_QUEUE_FAMILY_IGNORED; |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 203 | return true; |
| 204 | } |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 205 | |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 206 | void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) { |
| 207 | VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr)); |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 208 | bool isLinear = VK_IMAGE_TILING_LINEAR == info->fImageTiling; |
| 209 | GrVkMemory::FreeImageMemory(gpu, isLinear, info->fAlloc); |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 210 | } |
| 211 | |
GrVkImage::~GrVkImage() {
    // The underlying Resource must have been released or abandoned (via releaseImage /
    // abandonImage) before destruction; the destructor cannot decide which path to take.
    SkASSERT(!fResource);
}
| 216 | |
// Releases this image's GPU objects. If the image was queue-family transferred onto
// our queue, first record a barrier handing ownership back to the original family,
// then drop our ref on the backing Resource.
void GrVkImage::releaseImage(GrVkGpu* gpu) {
    if (fInfo.fCurrentQueueFamily != fInitialQueueFamily) {
        // The Vulkan spec is vague on what to put for the dstStageMask here. The spec for image
        // memory barrier says the dstStageMask must not be zero. However, in the spec when it talks
        // about family queue transfers it says the dstStageMask is ignored and should be set to
        // zero. Assuming it really is ignored we set it to VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT here
        // since it makes the Vulkan validation layers happy.
        this->setImageLayout(gpu, this->currentLayout(), 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                             false, true);
    }
    if (fResource) {
        // Break the resource's back-pointer to its owning texture before unreffing.
        fResource->removeOwningTexture();
        fResource->unref(gpu);
        fResource = nullptr;
    }
}
| 233 | |
| 234 | void GrVkImage::abandonImage() { |
| 235 | if (fResource) { |
Brian Salomon | 614c1a8 | 2018-12-19 15:42:06 -0500 | [diff] [blame] | 236 | fResource->removeOwningTexture(); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 237 | fResource->unrefAndAbandon(); |
| 238 | fResource = nullptr; |
| 239 | } |
| 240 | } |
| 241 | |
void GrVkImage::setResourceRelease(sk_sp<GrReleaseProcHelper> releaseHelper) {
    SkASSERT(fResource);
    // Forward the release proc on to GrVkImage::Resource, which will invoke it when
    // its GPU data is freed or abandoned (see BorrowedResource::freeGPUData).
    fResource->setRelease(std::move(releaseHelper));
}
| 247 | |
Ethan Nicholas | 8e265a7 | 2018-12-12 16:22:40 -0500 | [diff] [blame] | 248 | void GrVkImage::Resource::freeGPUData(GrVkGpu* gpu) const { |
Greg Daniel | 6a0176b | 2018-01-30 09:28:44 -0500 | [diff] [blame] | 249 | SkASSERT(!fReleaseHelper); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 250 | VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr)); |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 251 | bool isLinear = (VK_IMAGE_TILING_LINEAR == fImageTiling); |
| 252 | GrVkMemory::FreeImageMemory(gpu, isLinear, fAlloc); |
jvanverth | 0fcfb75 | 2016-03-09 09:57:52 -0800 | [diff] [blame] | 253 | } |
jvanverth | fe170d2 | 2016-03-22 13:15:44 -0700 | [diff] [blame] | 254 | |
// Registers (or replaces) the idle proc, the texture that owns this resource, and the
// opaque context passed back when the proc fires (see notifyRemovedFromCommandBuffer).
void GrVkImage::Resource::setIdleProc(GrVkTexture* owner, GrTexture::IdleProc proc,
                                      void* context) const {
    fOwningTexture = owner;
    fIdleProc = proc;
    fIdleProcContext = context;
}
| 261 | |
| 262 | void GrVkImage::Resource::removeOwningTexture() const { fOwningTexture = nullptr; } |
| 263 | |
| 264 | void GrVkImage::Resource::notifyAddedToCommandBuffer() const { ++fNumCommandBufferOwners; } |
| 265 | |
| 266 | void GrVkImage::Resource::notifyRemovedFromCommandBuffer() const { |
| 267 | SkASSERT(fNumCommandBufferOwners); |
| 268 | if (--fNumCommandBufferOwners || !fIdleProc) { |
| 269 | return; |
| 270 | } |
Brian Salomon | 9bc76d9 | 2019-01-24 12:18:33 -0500 | [diff] [blame] | 271 | if (fOwningTexture && fOwningTexture->resourcePriv().hasRefOrPendingIO()) { |
Brian Salomon | 614c1a8 | 2018-12-19 15:42:06 -0500 | [diff] [blame] | 272 | return; |
| 273 | } |
| 274 | fIdleProc(fIdleProcContext); |
| 275 | if (fOwningTexture) { |
| 276 | fOwningTexture->setIdleProc(nullptr, nullptr); |
| 277 | // Changing the texture's proc should change ours. |
| 278 | SkASSERT(!fIdleProc); |
| 279 | SkASSERT(!fIdleProc); |
| 280 | } else { |
| 281 | fIdleProc = nullptr; |
| 282 | fIdleProcContext = nullptr; |
| 283 | } |
| 284 | } |
| 285 | |
// A borrowed image is owned externally, so "freeing" our GPU data only means firing
// the release proc; we must not destroy the VkImage or its memory here.
void GrVkImage::BorrowedResource::freeGPUData(GrVkGpu* gpu) const {
    this->invokeReleaseProc();
}
Greg Daniel | cef213c | 2017-04-21 11:52:27 -0400 | [diff] [blame] | 289 | |
// Same as freeGPUData for borrowed images: on abandon we only fire the release proc,
// since the underlying VkImage is not ours to destroy.
void GrVkImage::BorrowedResource::abandonGPUData() const {
    this->invokeReleaseProc();
}
| 293 | |