Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2015 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 8 | #include "src/gpu/GrGpuResourcePriv.h" |
| 9 | #include "src/gpu/vk/GrVkGpu.h" |
| 10 | #include "src/gpu/vk/GrVkImage.h" |
| 11 | #include "src/gpu/vk/GrVkMemory.h" |
| 12 | #include "src/gpu/vk/GrVkTexture.h" |
| 13 | #include "src/gpu/vk/GrVkUtil.h" |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 14 | |
| 15 | #define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X) |
| 16 | |
Greg Daniel | f7828d0 | 2018-10-09 12:01:32 -0400 | [diff] [blame] | 17 | VkPipelineStageFlags GrVkImage::LayoutToPipelineSrcStageFlags(const VkImageLayout layout) { |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 18 | if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 19 | return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; |
| 20 | } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || |
| 21 | VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 22 | return VK_PIPELINE_STAGE_TRANSFER_BIT; |
Greg Daniel | f7828d0 | 2018-10-09 12:01:32 -0400 | [diff] [blame] | 23 | } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) { |
| 24 | return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; |
| 25 | } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout || |
| 26 | VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout) { |
| 27 | return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; |
| 28 | } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) { |
| 29 | return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 30 | } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { |
| 31 | return VK_PIPELINE_STAGE_HOST_BIT; |
Ethan Nicholas | 03896ae | 2019-03-07 16:54:24 -0500 | [diff] [blame] | 32 | } else if (VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) { |
Greg Daniel | cb32415 | 2019-02-25 11:36:53 -0500 | [diff] [blame] | 33 | return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 34 | } |
| 35 | |
| 36 | SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout); |
| 37 | return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; |
| 38 | } |
| 39 | |
| 40 | VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) { |
| 41 | // Currently we assume we will never being doing any explict shader writes (this doesn't include |
| 42 | // color attachment or depth/stencil writes). So we will ignore the |
| 43 | // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT. |
| 44 | |
| 45 | // We can only directly access the host memory if we are in preinitialized or general layout, |
| 46 | // and the image is linear. |
| 47 | // TODO: Add check for linear here so we are not always adding host to general, and we should |
| 48 | // only be in preinitialized if we are linear |
Brian Salomon | 2335644 | 2018-11-30 15:33:19 -0500 | [diff] [blame] | 49 | VkAccessFlags flags = 0; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 50 | if (VK_IMAGE_LAYOUT_GENERAL == layout) { |
| 51 | flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | |
| 52 | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | |
| 53 | VK_ACCESS_TRANSFER_WRITE_BIT | |
| 54 | VK_ACCESS_TRANSFER_READ_BIT | |
| 55 | VK_ACCESS_SHADER_READ_BIT | |
| 56 | VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT; |
| 57 | } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) { |
| 58 | flags = VK_ACCESS_HOST_WRITE_BIT; |
| 59 | } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) { |
Greg Daniel | f7828d0 | 2018-10-09 12:01:32 -0400 | [diff] [blame] | 60 | flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 61 | } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) { |
| 62 | flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; |
| 63 | } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) { |
| 64 | flags = VK_ACCESS_TRANSFER_WRITE_BIT; |
Greg Daniel | ab79ff9 | 2019-03-29 09:48:05 -0400 | [diff] [blame] | 65 | } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout || |
| 66 | VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout || |
| 67 | VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) { |
| 68 | // There are no writes that need to be made available |
Greg Daniel | cb32415 | 2019-02-25 11:36:53 -0500 | [diff] [blame] | 69 | flags = 0; |
Greg Daniel | 6ddbafc | 2018-05-24 12:34:29 -0400 | [diff] [blame] | 70 | } |
| 71 | return flags; |
| 72 | } |
| 73 | |
egdaniel | 58a8d92 | 2016-04-21 08:03:10 -0700 | [diff] [blame] | 74 | VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) { |
| 75 | switch (format) { |
| 76 | case VK_FORMAT_S8_UINT: |
| 77 | return VK_IMAGE_ASPECT_STENCIL_BIT; |
| 78 | case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough |
| 79 | case VK_FORMAT_D32_SFLOAT_S8_UINT: |
| 80 | return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; |
| 81 | default: |
Greg Daniel | 81b8059 | 2017-12-13 10:20:04 -0500 | [diff] [blame] | 82 | SkASSERT(GrVkFormatIsSupported(format)); |
egdaniel | 58a8d92 | 2016-04-21 08:03:10 -0700 | [diff] [blame] | 83 | return VK_IMAGE_ASPECT_COLOR_BIT; |
| 84 | } |
| 85 | } |
| 86 | |
// Transitions the image to newLayout by recording a VkImageMemoryBarrier, optionally also
// performing a queue-family ownership transfer. When releaseFamilyQueue is true the image is
// transferred back to its original (fInitialQueueFamily) queue family. The source access and
// stage masks are derived from the image's current layout; the destination masks are supplied
// by the caller. On success the tracked layout (and possibly fCurrentQueueFamily) is updated.
void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout,
                               VkAccessFlags dstAccessMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion, bool releaseFamilyQueue) {
    // UNDEFINED and PREINITIALIZED are only valid as initial layouts; never transition to them.
    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
             VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout);
    VkImageLayout currentLayout = this->currentLayout();

    if (releaseFamilyQueue && fInfo.fCurrentQueueFamily == fInitialQueueFamily &&
        newLayout == currentLayout) {
        // We never transferred the image to this queue and we are releasing it so don't do
        // anything.
        return;
    }

    // If the old and new layout are the same and the layout is a read only layout, there is no need
    // to put in a barrier unless we also need to switch queues.
    if (newLayout == currentLayout && !releaseFamilyQueue &&
        (fInfo.fCurrentQueueFamily == VK_QUEUE_FAMILY_IGNORED ||
         fInfo.fCurrentQueueFamily == gpu->queueIndex()) &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    // Derive what must be made available/waited on from the layout we are leaving.
    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineSrcStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);

    // VK_QUEUE_FAMILY_IGNORED on both sides means "no ownership transfer".
    uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    if (fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_IGNORED &&
        gpu->queueIndex() != fInfo.fCurrentQueueFamily) {
        // The image still is owned by its original queue family and we need to transfer it into
        // ours.
        SkASSERT(!releaseFamilyQueue);
        SkASSERT(fInfo.fCurrentQueueFamily == fInitialQueueFamily);

        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = gpu->queueIndex();
        fInfo.fCurrentQueueFamily = gpu->queueIndex();
    } else if (releaseFamilyQueue) {
        // We are releasing the image so we must transfer the image back to its original queue
        // family.
        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = fInitialQueueFamily;
        fInfo.fCurrentQueueFamily = fInitialQueueFamily;
    }

    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,  // sType
        nullptr,                                 // pNext
        srcAccessMask,                           // srcAccessMask
        dstAccessMask,                           // dstAccessMask
        currentLayout,                           // oldLayout
        newLayout,                               // newLayout
        srcQueueFamilyIndex,                     // srcQueueFamilyIndex
        dstQueueFamilyIndex,                     // dstQueueFamilyIndex
        fInfo.fImage,                            // image
        // All mip levels, single array layer.
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 }  // subresourceRange
    };

    gpu->addImageMemoryBarrier(this->resource(), srcStageMask, dstStageMask, byRegion,
                               &imageMemoryBarrier);

    // Keep the tracked layout in sync with what the recorded barrier will produce.
    this->updateImageLayout(newLayout);
}
| 155 | |
// Creates a VkImage described by imageDesc, allocates and binds memory for it, and fills in
// *info on success. Returns false (creating nothing) for zero-sized images or an unsupported
// sample count; if memory binding fails, the created image is destroyed before returning false.
bool GrVkImage::InitImageInfo(const GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) {
    if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) {
        return false;
    }
    VkImage image = VK_NULL_HANDLE;
    GrVkAlloc alloc;

    bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling;
    // Linear images start PREINITIALIZED so host-written contents are preserved; optimal-tiled
    // images start UNDEFINED.
    VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED
                                           : VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) {
        return false;
    }

    // Multisampling is only supported with optimal tiling.
    SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling ||
             VK_SAMPLE_COUNT_1_BIT == vkSamples);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        nullptr,                                     // pNext
        0,                                           // VkImageCreateFlags
        imageDesc.fImageType,                        // VkImageType
        imageDesc.fFormat,                           // VkFormat
        { imageDesc.fWidth, imageDesc.fHeight, 1 },  // VkExtent3D
        imageDesc.fLevels,                           // mipLevels
        1,                                           // arrayLayers
        vkSamples,                                   // samples
        imageDesc.fImageTiling,                      // VkImageTiling
        imageDesc.fUsageFlags,                       // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        0,                                           // pQueueFamilyIndices
        initialLayout                                // initialLayout
    };

    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr,
                                                        &image));

    if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, isLinear, &alloc)) {
        // Don't leak the image if we can't back it with memory.
        VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr));
        return false;
    }

    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = imageDesc.fImageTiling;
    info->fImageLayout = initialLayout;
    info->fFormat = imageDesc.fFormat;
    info->fLevelCount = imageDesc.fLevels;
    // No queue-family ownership transfer has happened yet.
    info->fCurrentQueueFamily = VK_QUEUE_FAMILY_IGNORED;
    return true;
}
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 211 | |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 212 | void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) { |
| 213 | VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr)); |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 214 | bool isLinear = VK_IMAGE_TILING_LINEAR == info->fImageTiling; |
| 215 | GrVkMemory::FreeImageMemory(gpu, isLinear, info->fAlloc); |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 216 | } |
| 217 | |
GrVkImage::~GrVkImage() {
    // should have been released or abandoned first; the destructor itself frees nothing.
    SkASSERT(!fResource);
}
| 222 | |
Greg Daniel | bae7121 | 2019-03-01 15:24:35 -0500 | [diff] [blame] | 223 | void GrVkImage::prepareForPresent(GrVkGpu* gpu) { |
| 224 | VkImageLayout layout = this->currentLayout(); |
| 225 | if (fInitialQueueFamily != VK_QUEUE_FAMILY_EXTERNAL && |
| 226 | fInitialQueueFamily != VK_QUEUE_FAMILY_FOREIGN_EXT) { |
| 227 | if (gpu->vkCaps().supportsSwapchain()) { |
| 228 | layout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; |
| 229 | } |
| 230 | } |
| 231 | this->setImageLayout(gpu, layout, 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false, true); |
| 232 | } |
| 233 | |
Greg Daniel | 797efca | 2019-05-09 14:04:20 -0400 | [diff] [blame] | 234 | void GrVkImage::prepareForExternal(GrVkGpu* gpu) { |
| 235 | this->setImageLayout(gpu, this->currentLayout(), 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false, |
| 236 | true); |
| 237 | } |
| 238 | |
Ethan Nicholas | 8e265a7 | 2018-12-12 16:22:40 -0500 | [diff] [blame] | 239 | void GrVkImage::releaseImage(GrVkGpu* gpu) { |
Greg Daniel | ecddbc0 | 2018-08-30 16:39:34 -0400 | [diff] [blame] | 240 | if (fInfo.fCurrentQueueFamily != fInitialQueueFamily) { |
Greg Daniel | 950dfd7 | 2019-02-04 11:16:37 -0500 | [diff] [blame] | 241 | // The Vulkan spec is vague on what to put for the dstStageMask here. The spec for image |
| 242 | // memory barrier says the dstStageMask must not be zero. However, in the spec when it talks |
| 243 | // about family queue transfers it says the dstStageMask is ignored and should be set to |
| 244 | // zero. Assuming it really is ignored we set it to VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT here |
| 245 | // since it makes the Vulkan validation layers happy. |
| 246 | this->setImageLayout(gpu, this->currentLayout(), 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, |
| 247 | false, true); |
Greg Daniel | ecddbc0 | 2018-08-30 16:39:34 -0400 | [diff] [blame] | 248 | } |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 249 | if (fResource) { |
Brian Salomon | 614c1a8 | 2018-12-19 15:42:06 -0500 | [diff] [blame] | 250 | fResource->removeOwningTexture(); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 251 | fResource->unref(gpu); |
| 252 | fResource = nullptr; |
| 253 | } |
| 254 | } |
| 255 | |
| 256 | void GrVkImage::abandonImage() { |
| 257 | if (fResource) { |
Brian Salomon | 614c1a8 | 2018-12-19 15:42:06 -0500 | [diff] [blame] | 258 | fResource->removeOwningTexture(); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 259 | fResource->unrefAndAbandon(); |
| 260 | fResource = nullptr; |
| 261 | } |
| 262 | } |
| 263 | |
// Installs a client-supplied release callback; requires a live resource (must not be called
// after releaseImage/abandonImage).
void GrVkImage::setResourceRelease(sk_sp<GrRefCntedCallback> releaseHelper) {
    SkASSERT(fResource);
    // Forward the release proc on to GrVkImage::Resource
    fResource->setRelease(std::move(releaseHelper));
}
| 269 | |
Ethan Nicholas | 8e265a7 | 2018-12-12 16:22:40 -0500 | [diff] [blame] | 270 | void GrVkImage::Resource::freeGPUData(GrVkGpu* gpu) const { |
Brian Salomon | 8cabb32 | 2019-02-22 10:44:19 -0500 | [diff] [blame] | 271 | this->invokeReleaseProc(); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 272 | VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr)); |
jvanverth | 6b6ffc4 | 2016-06-13 14:28:07 -0700 | [diff] [blame] | 273 | bool isLinear = (VK_IMAGE_TILING_LINEAR == fImageTiling); |
| 274 | GrVkMemory::FreeImageMemory(gpu, isLinear, fAlloc); |
jvanverth | 0fcfb75 | 2016-03-09 09:57:52 -0800 | [diff] [blame] | 275 | } |
jvanverth | fe170d2 | 2016-03-22 13:15:44 -0700 | [diff] [blame] | 276 | |
// Registers a callback to run when the resource becomes idle, and records the texture that
// currently owns this resource (at most one owner at a time).
void GrVkImage::Resource::addIdleProc(GrVkTexture* owningTexture,
                                      sk_sp<GrRefCntedCallback> idleProc) const {
    SkASSERT(!fOwningTexture || fOwningTexture == owningTexture);
    fOwningTexture = owningTexture;
    fIdleProcs.push_back(std::move(idleProc));
}
| 283 | |
// Number of idle callbacks currently registered.
int GrVkImage::Resource::idleProcCnt() const { return fIdleProcs.count(); }
| 285 | |
// Returns the i-th registered idle callback (no bounds check beyond the container's).
sk_sp<GrRefCntedCallback> GrVkImage::Resource::idleProc(int i) const { return fIdleProcs[i]; }
| 287 | |
// Drops all registered idle callbacks without invoking them.
void GrVkImage::Resource::resetIdleProcs() const { fIdleProcs.reset(); }
| 289 | |
// Clears the back-pointer to the owning texture (e.g. when the texture releases the resource).
void GrVkImage::Resource::removeOwningTexture() const { fOwningTexture = nullptr; }
| 291 | |
// Tracks that one more in-flight command buffer references this resource.
void GrVkImage::Resource::notifyAddedToCommandBuffer() const { ++fNumCommandBufferOwners; }
| 293 | |
// Called when a command buffer referencing this resource finishes. When the last command
// buffer owner goes away and idle procs are registered, either defers to the owning texture
// (which may still have cache refs/pending IO) or, with no owner, simply drops the procs.
void GrVkImage::Resource::notifyRemovedFromCommandBuffer() const {
    SkASSERT(fNumCommandBufferOwners);
    // Still owned by another command buffer, or nothing to do — bail out.
    if (--fNumCommandBufferOwners || !fIdleProcs.count()) {
        return;
    }
    if (fOwningTexture) {
        if (fOwningTexture->resourcePriv().hasRefOrPendingIO()) {
            // Wait for the texture to become idle in the cache to call the procs.
            return;
        }
        fOwningTexture->callIdleProcsOnBehalfOfResource();
    } else {
        // No owning texture: nobody will ever call the procs, so just drop them.
        fIdleProcs.reset();
    }
}
| 309 | |
// Borrowed resources do not own the VkImage/memory, so freeing only fires the release proc.
void GrVkImage::BorrowedResource::freeGPUData(GrVkGpu* gpu) const {
    this->invokeReleaseProc();
}
Greg Daniel | cef213c | 2017-04-21 11:52:27 -0400 | [diff] [blame] | 313 | |
// On context abandonment there is likewise nothing to destroy; just fire the release proc.
void GrVkImage::BorrowedResource::abandonGPUData() const {
    this->invokeReleaseProc();
}
| 317 | |
#if GR_TEST_UTILS
// Test-only helper: forces the tracked queue family to the GPU's graphics queue without
// recording any ownership-transfer barrier.
void GrVkImage::setCurrentQueueFamilyToGraphicsQueue(GrVkGpu* gpu) {
    fInfo.fCurrentQueueFamily = gpu->queueIndex();
}
#endif
| 323 | |