blob: c9c7a7fb22d53f4a2397951f3e65757e9232fcb0 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkImage.h"
Greg Daniel2bc96d62021-09-13 13:08:02 -04009
10#include "src/gpu/vk/GrVkGpu.h"
11#include "src/gpu/vk/GrVkImageView.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "src/gpu/vk/GrVkMemory.h"
13#include "src/gpu/vk/GrVkTexture.h"
14#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050015
16#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)
17
Greg Daniel2bc96d62021-09-13 13:08:02 -040018sk_sp<GrVkImage> GrVkImage::MakeStencil(GrVkGpu* gpu,
19 SkISize dimensions,
20 int sampleCnt,
21 VkFormat format) {
22 VkImageUsageFlags vkUsageFlags =
23 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
24 return GrVkImage::Make(gpu,
25 dimensions,
26 UsageFlags::kStencilAttachment,
27 sampleCnt,
28 format,
29 /*mipLevels=*/1,
30 vkUsageFlags,
31 GrProtected::kNo,
Greg Daniel77435592021-09-22 13:55:44 -040032 GrMemoryless::kNo,
Greg Daniel2bc96d62021-09-13 13:08:02 -040033 SkBudgeted::kYes);
34}
35
36sk_sp<GrVkImage> GrVkImage::MakeMSAA(GrVkGpu* gpu,
37 SkISize dimensions,
38 int numSamples,
39 VkFormat format,
Greg Daniel77435592021-09-22 13:55:44 -040040 GrProtected isProtected,
41 GrMemoryless memoryless) {
Greg Daniel2bc96d62021-09-13 13:08:02 -040042 SkASSERT(numSamples > 1);
43
Greg Daniel77435592021-09-22 13:55:44 -040044 VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
45 if (memoryless == GrMemoryless::kYes) {
46 vkUsageFlags |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
47 } else {
48 vkUsageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
49 }
Greg Daniel2bc96d62021-09-13 13:08:02 -040050 return GrVkImage::Make(gpu,
51 dimensions,
52 UsageFlags::kColorAttachment,
53 numSamples,
54 format,
55 /*mipLevels=*/1,
56 vkUsageFlags,
57 isProtected,
Greg Daniel77435592021-09-22 13:55:44 -040058 memoryless,
Greg Daniel2bc96d62021-09-13 13:08:02 -040059 SkBudgeted::kYes);
60}
61
62sk_sp<GrVkImage> GrVkImage::MakeTexture(GrVkGpu* gpu,
63 SkISize dimensions,
64 VkFormat format,
65 uint32_t mipLevels,
66 GrRenderable renderable,
67 int numSamples,
68 SkBudgeted budgeted,
69 GrProtected isProtected) {
70 UsageFlags usageFlags = UsageFlags::kTexture;
71 VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
72 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
73 if (renderable == GrRenderable::kYes) {
74 usageFlags |= UsageFlags::kColorAttachment;
75 vkUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
76 // We always make our render targets support being used as input attachments
77 vkUsageFlags |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
78 }
79
80 return GrVkImage::Make(gpu,
81 dimensions,
82 usageFlags,
83 numSamples,
84 format,
85 mipLevels,
86 vkUsageFlags,
87 isProtected,
Greg Daniel77435592021-09-22 13:55:44 -040088 GrMemoryless::kNo,
Greg Daniel2bc96d62021-09-13 13:08:02 -040089 budgeted);
90}
91
92static bool make_views(GrVkGpu* gpu,
93 const GrVkImageInfo& info,
94 GrAttachment::UsageFlags attachmentUsages,
95 sk_sp<const GrVkImageView>* framebufferView,
96 sk_sp<const GrVkImageView>* textureView) {
97 GrVkImageView::Type viewType;
98 if (attachmentUsages & GrAttachment::UsageFlags::kStencilAttachment) {
99 // If we have stencil usage then we shouldn't have any other usages
100 SkASSERT(attachmentUsages == GrAttachment::UsageFlags::kStencilAttachment);
101 viewType = GrVkImageView::kStencil_Type;
102 } else {
103 viewType = GrVkImageView::kColor_Type;
104 }
105
106 if (SkToBool(attachmentUsages & GrAttachment::UsageFlags::kStencilAttachment) ||
107 SkToBool(attachmentUsages & GrAttachment::UsageFlags::kColorAttachment)) {
108 // Attachments can only have a mip level of 1
109 *framebufferView = GrVkImageView::Make(
110 gpu, info.fImage, info.fFormat, viewType, 1, info.fYcbcrConversionInfo);
111 if (!*framebufferView) {
112 return false;
113 }
114 }
115
116 if (attachmentUsages & GrAttachment::UsageFlags::kTexture) {
117 *textureView = GrVkImageView::Make(gpu,
118 info.fImage,
119 info.fFormat,
120 viewType,
121 info.fLevelCount,
122 info.fYcbcrConversionInfo);
123 if (!*textureView) {
124 return false;
125 }
126 }
127 return true;
128}
129
130sk_sp<GrVkImage> GrVkImage::Make(GrVkGpu* gpu,
131 SkISize dimensions,
132 UsageFlags attachmentUsages,
133 int sampleCnt,
134 VkFormat format,
135 uint32_t mipLevels,
136 VkImageUsageFlags vkUsageFlags,
137 GrProtected isProtected,
Greg Daniel77435592021-09-22 13:55:44 -0400138 GrMemoryless memoryless,
Greg Daniel2bc96d62021-09-13 13:08:02 -0400139 SkBudgeted budgeted) {
140 GrVkImage::ImageDesc imageDesc;
141 imageDesc.fImageType = VK_IMAGE_TYPE_2D;
142 imageDesc.fFormat = format;
143 imageDesc.fWidth = dimensions.width();
144 imageDesc.fHeight = dimensions.height();
145 imageDesc.fLevels = mipLevels;
146 imageDesc.fSamples = sampleCnt;
147 imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
148 imageDesc.fUsageFlags = vkUsageFlags;
149 imageDesc.fIsProtected = isProtected;
150
151 GrVkImageInfo info;
152 if (!GrVkImage::InitImageInfo(gpu, imageDesc, &info)) {
153 return nullptr;
154 }
155
156 sk_sp<const GrVkImageView> framebufferView;
157 sk_sp<const GrVkImageView> textureView;
158 if (!make_views(gpu, info, attachmentUsages, &framebufferView, &textureView)) {
159 GrVkImage::DestroyImageInfo(gpu, &info);
160 return nullptr;
161 }
162
163 sk_sp<GrBackendSurfaceMutableStateImpl> mutableState(
164 new GrBackendSurfaceMutableStateImpl(info.fImageLayout, info.fCurrentQueueFamily));
165 return sk_sp<GrVkImage>(new GrVkImage(gpu,
166 dimensions,
167 attachmentUsages,
168 info,
169 std::move(mutableState),
170 std::move(framebufferView),
171 std::move(textureView),
172 budgeted));
173}
174
175sk_sp<GrVkImage> GrVkImage::MakeWrapped(GrVkGpu* gpu,
176 SkISize dimensions,
177 const GrVkImageInfo& info,
178 sk_sp<GrBackendSurfaceMutableStateImpl> mutableState,
179 UsageFlags attachmentUsages,
180 GrWrapOwnership ownership,
181 GrWrapCacheable cacheable,
182 bool forSecondaryCB) {
183 sk_sp<const GrVkImageView> framebufferView;
184 sk_sp<const GrVkImageView> textureView;
185 if (!forSecondaryCB) {
186 if (!make_views(gpu, info, attachmentUsages, &framebufferView, &textureView)) {
187 return nullptr;
188 }
189 }
190
191 GrBackendObjectOwnership backendOwnership = kBorrow_GrWrapOwnership == ownership
192 ? GrBackendObjectOwnership::kBorrowed
193 : GrBackendObjectOwnership::kOwned;
194
195 return sk_sp<GrVkImage>(new GrVkImage(gpu,
196 dimensions,
197 attachmentUsages,
198 info,
199 std::move(mutableState),
200 std::move(framebufferView),
201 std::move(textureView),
202 backendOwnership,
203 cacheable,
204 forSecondaryCB));
205}
206
// Constructor for images created and owned by Ganesh (from GrVkImage::Make).
// The image is registered with the resource cache as budgeted/unbudgeted.
GrVkImage::GrVkImage(GrVkGpu* gpu,
                     SkISize dimensions,
                     UsageFlags supportedUsages,
                     const GrVkImageInfo& info,
                     sk_sp<GrBackendSurfaceMutableStateImpl> mutableState,
                     sk_sp<const GrVkImageView> framebufferView,
                     sk_sp<const GrVkImageView> textureView,
                     SkBudgeted budgeted)
        : GrAttachment(gpu,
                       dimensions,
                       supportedUsages,
                       info.fSampleCount,
                       // More than one mip level means the image is mipmapped.
                       info.fLevelCount > 1 ? GrMipmapped::kYes : GrMipmapped::kNo,
                       info.fProtected,
                       // A lazily-allocated backing store marks the attachment memoryless.
                       info.fAlloc.fFlags & GrVkAlloc::kLazilyAllocated_Flag ? GrMemoryless::kYes
                                                                            : GrMemoryless::kNo)
        , fInfo(info)
        , fInitialQueueFamily(info.fCurrentQueueFamily)
        , fMutableState(std::move(mutableState))
        , fFramebufferView(std::move(framebufferView))
        , fTextureView(std::move(textureView))
        , fIsBorrowed(false) {
    this->init(gpu, false);
    this->registerWithCache(budgeted);
}
232
// Constructor for wrapped (externally created) images (from MakeWrapped).
// Ownership of the underlying VkImage depends on `ownership`; the object is
// registered with the cache as a wrapped resource.
GrVkImage::GrVkImage(GrVkGpu* gpu,
                     SkISize dimensions,
                     UsageFlags supportedUsages,
                     const GrVkImageInfo& info,
                     sk_sp<GrBackendSurfaceMutableStateImpl> mutableState,
                     sk_sp<const GrVkImageView> framebufferView,
                     sk_sp<const GrVkImageView> textureView,
                     GrBackendObjectOwnership ownership,
                     GrWrapCacheable cacheable,
                     bool forSecondaryCB)
        : GrAttachment(gpu,
                       dimensions,
                       supportedUsages,
                       info.fSampleCount,
                       // More than one mip level means the image is mipmapped.
                       info.fLevelCount > 1 ? GrMipmapped::kYes : GrMipmapped::kNo,
                       info.fProtected)
        , fInfo(info)
        , fInitialQueueFamily(info.fCurrentQueueFamily)
        , fMutableState(std::move(mutableState))
        , fFramebufferView(std::move(framebufferView))
        , fTextureView(std::move(textureView))
        , fIsBorrowed(GrBackendObjectOwnership::kBorrowed == ownership) {
    this->init(gpu, forSecondaryCB);
    this->registerWithCacheWrapped(cacheable);
}
258
// Shared constructor tail: validates the image's usage-flag/queue-family
// invariants (debug only) and creates the GPU resource object that manages
// the VkImage/memory lifetime.
void GrVkImage::init(GrVkGpu* gpu, bool forSecondaryCB) {
    // The mutable state must agree with the snapshot stored in fInfo.
    SkASSERT(fMutableState->getImageLayout() == fInfo.fImageLayout);
    SkASSERT(fMutableState->getQueueFamilyIndex() == fInfo.fCurrentQueueFamily);
#ifdef SK_DEBUG
    if (fInfo.fImageUsageFlags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
        // Depth/stencil images must support transfer-dst (see MakeStencil).
        SkASSERT(SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT));
    } else {
        if (fInfo.fAlloc.fFlags & GrVkAlloc::kLazilyAllocated_Flag) {
            // Lazily-allocated (memoryless) images must be transient and must
            // not have any transfer usage.
            SkASSERT(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT);
            SkASSERT(!SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT) &&
                     !SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT));
        } else {
            // Regular images: never transient, always transferable both ways.
            SkASSERT(!SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT));
            SkASSERT(SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT) &&
                     SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT));
        }
    }
    // We can't transfer from the non graphics queue to the graphics queue since we can't
    // release the image from the original queue without having that queue. This limits us in terms
    // of the types of queue indices we can handle.
    if (fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_IGNORED &&
        fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_EXTERNAL &&
        fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_FOREIGN_EXT) {
        if (fInfo.fSharingMode == VK_SHARING_MODE_EXCLUSIVE) {
            if (fInfo.fCurrentQueueFamily != gpu->queueIndex()) {
                SkASSERT(false);
            }
        } else {
            SkASSERT(false);
        }
    }
#endif
    if (forSecondaryCB) {
        // Secondary-command-buffer images are not tracked by a resource object.
        fResource = nullptr;
    } else if (fIsBorrowed) {
        // Borrowed: do not destroy the VkImage/memory when the resource dies.
        fResource = new BorrowedResource(gpu, fInfo.fImage, fInfo.fAlloc, fInfo.fImageTiling);
    } else {
        // Owned: the resource destroys the image and frees its memory.
        SkASSERT(VK_NULL_HANDLE != fInfo.fAlloc.fMemory);
        fResource = new Resource(gpu, fInfo.fImage, fInfo.fAlloc, fInfo.fImageTiling);
    }
}
300
Greg Danielf7828d02018-10-09 12:01:32 -0400301VkPipelineStageFlags GrVkImage::LayoutToPipelineSrcStageFlags(const VkImageLayout layout) {
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400302 if (VK_IMAGE_LAYOUT_GENERAL == layout) {
303 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
304 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
305 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
306 return VK_PIPELINE_STAGE_TRANSFER_BIT;
Greg Danielf7828d02018-10-09 12:01:32 -0400307 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
308 return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
309 } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout ||
310 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout) {
311 return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
312 } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
313 return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400314 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
315 return VK_PIPELINE_STAGE_HOST_BIT;
Ethan Nicholas03896ae2019-03-07 16:54:24 -0500316 } else if (VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) {
Greg Danielcb324152019-02-25 11:36:53 -0500317 return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400318 }
319
320 SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout);
321 return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
322}
323
324VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) {
325 // Currently we assume we will never being doing any explict shader writes (this doesn't include
326 // color attachment or depth/stencil writes). So we will ignore the
327 // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT.
328
329 // We can only directly access the host memory if we are in preinitialized or general layout,
330 // and the image is linear.
331 // TODO: Add check for linear here so we are not always adding host to general, and we should
332 // only be in preinitialized if we are linear
Brian Salomon23356442018-11-30 15:33:19 -0500333 VkAccessFlags flags = 0;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400334 if (VK_IMAGE_LAYOUT_GENERAL == layout) {
335 flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
336 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
337 VK_ACCESS_TRANSFER_WRITE_BIT |
Greg Danielb509bbb2020-10-02 13:30:41 -0400338 VK_ACCESS_HOST_WRITE_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400339 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
340 flags = VK_ACCESS_HOST_WRITE_BIT;
341 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
Greg Danielb509bbb2020-10-02 13:30:41 -0400342 flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400343 } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) {
344 flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
345 } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
346 flags = VK_ACCESS_TRANSFER_WRITE_BIT;
Greg Danielab79ff92019-03-29 09:48:05 -0400347 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
348 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout ||
349 VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) {
350 // There are no writes that need to be made available
Greg Danielcb324152019-02-25 11:36:53 -0500351 flags = 0;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400352 }
353 return flags;
354}
355
// Returns the image aspect bits implied by a VkFormat: stencil-only for S8,
// depth+stencil for combined depth/stencil formats, color for everything else.
VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
            return VK_IMAGE_ASPECT_STENCIL_BIT;
        case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
        default:
            return VK_IMAGE_ASPECT_COLOR_BIT;
    }
}
367
// Records an image memory barrier that transitions this image to `newLayout`
// and/or transfers ownership to `newQueueFamilyIndex`, then updates the
// tracked mutable state. Redundant read-only-to-same-read-only transitions
// (with no queue change) are skipped.
void GrVkImage::setImageLayoutAndQueueIndex(const GrVkGpu* gpu,
                                            VkImageLayout newLayout,
                                            VkAccessFlags dstAccessMask,
                                            VkPipelineStageFlags dstStageMask,
                                            bool byRegion,
                                            uint32_t newQueueFamilyIndex) {
// Enable the following block to test new devices to confirm their lazy images stay at 0 memory use.
#if 0
    if (fInfo.fAlloc.fFlags & GrVkAlloc::kLazilyAllocated_Flag) {
        VkDeviceSize size;
        VK_CALL(gpu, GetDeviceMemoryCommitment(gpu->device(), fInfo.fAlloc.fMemory, &size));

        SkDebugf("Lazy Image. This: %p, image: %d, size: %d\n", this, fInfo.fImage, size);
    }
#endif
    SkASSERT(!gpu->isDeviceLost());
    // UNDEFINED/PREINITIALIZED are only valid as a *current* layout, never as
    // a transition target (unless nothing actually changes).
    SkASSERT(newLayout == this->currentLayout() ||
             (VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
              VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout));
    VkImageLayout currentLayout = this->currentLayout();
    uint32_t currentQueueIndex = this->currentQueueFamilyIndex();

#ifdef SK_DEBUG
    // Validate legal queue-family transitions for the image's sharing mode.
    if (fInfo.fSharingMode == VK_SHARING_MODE_CONCURRENT) {
        if (newQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) {
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED ||
                     currentQueueIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                     currentQueueIndex == VK_QUEUE_FAMILY_FOREIGN_EXT);
        } else {
            // Concurrent images only transfer to/from external/foreign queues.
            SkASSERT(newQueueFamilyIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                     newQueueFamilyIndex == VK_QUEUE_FAMILY_FOREIGN_EXT);
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED);
        }
    } else {
        SkASSERT(fInfo.fSharingMode == VK_SHARING_MODE_EXCLUSIVE);
        if (newQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED ||
            currentQueueIndex == gpu->queueIndex()) {
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED ||
                     currentQueueIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                     currentQueueIndex == VK_QUEUE_FAMILY_FOREIGN_EXT ||
                     currentQueueIndex == gpu->queueIndex());
        } else if (newQueueFamilyIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                   newQueueFamilyIndex == VK_QUEUE_FAMILY_FOREIGN_EXT) {
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED ||
                     currentQueueIndex == gpu->queueIndex());
        }
    }
#endif

    // For exclusive images, IGNORED means "our graphics queue".
    if (fInfo.fSharingMode == VK_SHARING_MODE_EXCLUSIVE) {
        if (newQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) {
            newQueueFamilyIndex = gpu->queueIndex();
        }
        if (currentQueueIndex == VK_QUEUE_FAMILY_IGNORED) {
            currentQueueIndex = gpu->queueIndex();
        }
    }

    // If the old and new layout are the same and the layout is a read only layout, there is no need
    // to put in a barrier unless we also need to switch queues.
    if (newLayout == currentLayout && currentQueueIndex == newQueueFamilyIndex &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    // Derive the source half of the barrier from the current layout.
    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineSrcStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);

    // Barrier covers all mip levels and the single array layer.
    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,          // sType
        nullptr,                                         // pNext
        srcAccessMask,                                   // srcAccessMask
        dstAccessMask,                                   // dstAccessMask
        currentLayout,                                   // oldLayout
        newLayout,                                       // newLayout
        currentQueueIndex,                               // srcQueueFamilyIndex
        newQueueFamilyIndex,                             // dstQueueFamilyIndex
        fInfo.fImage,                                    // image
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 }      // subresourceRange
    };
    SkASSERT(srcAccessMask == imageMemoryBarrier.srcAccessMask);
    gpu->addImageMemoryBarrier(this->resource(), srcStageMask, dstStageMask, byRegion,
                               &imageMemoryBarrier);

    // Keep the tracked mutable state in sync with the recorded barrier.
    this->updateImageLayout(newLayout);
    this->setQueueFamilyIndex(newQueueFamilyIndex);
}
459
// Creates a VkImage matching `imageDesc`, allocates and binds its memory, and
// fills in `info`. Returns false on any failure (invalid size, unsupported
// protected memory, image creation or allocation failure); on failure no
// resources are leaked.
bool GrVkImage::InitImageInfo(GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) {
    if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) {
        return false;
    }
    if ((imageDesc.fIsProtected == GrProtected::kYes) && !gpu->vkCaps().supportsProtectedMemory()) {
        return false;
    }

    // Linear images start PREINITIALIZED (host-writable); optimal start UNDEFINED.
    bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling;
    VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED
                                           : VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) {
        return false;
    }

    // Multisampling is only used with optimal tiling.
    SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling ||
             VK_SAMPLE_COUNT_1_BIT == vkSamples);

    VkImageCreateFlags createflags = 0;
    if (imageDesc.fIsProtected == GrProtected::kYes || gpu->protectedContext()) {
        createflags |= VK_IMAGE_CREATE_PROTECTED_BIT;
    }
    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        nullptr,                                     // pNext
        createflags,                                 // VkImageCreateFlags
        imageDesc.fImageType,                        // VkImageType
        imageDesc.fFormat,                           // VkFormat
        { imageDesc.fWidth, imageDesc.fHeight, 1 },  // VkExtent3D
        imageDesc.fLevels,                           // mipLevels
        1,                                           // arrayLayers
        vkSamples,                                   // samples
        imageDesc.fImageTiling,                      // VkImageTiling
        imageDesc.fUsageFlags,                       // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        nullptr,                                     // pQueueFamilyIndices
        initialLayout                                // initialLayout
    };

    VkImage image = VK_NULL_HANDLE;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, CreateImage(gpu->device(), &imageCreateInfo, nullptr, &image));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Transient attachments request lazily-allocated (memoryless) memory.
    GrMemoryless memoryless = imageDesc.fUsageFlags & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT
                                      ? GrMemoryless::kYes
                                      : GrMemoryless::kNo;
    GrVkAlloc alloc;
    // Fail if allocation fails, or if we asked for memoryless but did not
    // actually get a lazily-allocated backing store.
    if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, memoryless, &alloc) ||
        (memoryless == GrMemoryless::kYes &&
         !SkToBool(alloc.fFlags & GrVkAlloc::kLazilyAllocated_Flag))) {
        VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr));
        return false;
    }

    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = imageDesc.fImageTiling;
    info->fImageLayout = initialLayout;
    info->fFormat = imageDesc.fFormat;
    info->fImageUsageFlags = imageDesc.fUsageFlags;
    info->fSampleCount = imageDesc.fSamples;
    info->fLevelCount = imageDesc.fLevels;
    info->fCurrentQueueFamily = VK_QUEUE_FAMILY_IGNORED;
    info->fProtected =
            (createflags & VK_IMAGE_CREATE_PROTECTED_BIT) ? GrProtected::kYes : GrProtected::kNo;
    info->fSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    return true;
}
Greg Daniel164a9f02016-02-22 09:56:40 -0500535
// Destroys the VkImage and frees the memory previously created by
// InitImageInfo.
void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) {
    VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr));
    GrVkMemory::FreeImageMemory(gpu, info->fAlloc);
}
540
GrVkImage::~GrVkImage() {
    // should have been released first
    SkASSERT(!fResource);
    SkASSERT(!fFramebufferView);
    SkASSERT(!fTextureView);
}
547
Greg Danielbae71212019-03-01 15:24:35 -0500548void GrVkImage::prepareForPresent(GrVkGpu* gpu) {
549 VkImageLayout layout = this->currentLayout();
550 if (fInitialQueueFamily != VK_QUEUE_FAMILY_EXTERNAL &&
551 fInitialQueueFamily != VK_QUEUE_FAMILY_FOREIGN_EXT) {
552 if (gpu->vkCaps().supportsSwapchain()) {
553 layout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
554 }
555 }
Greg Daniel7f3408b2020-06-03 13:31:00 -0400556 this->setImageLayoutAndQueueIndex(gpu, layout, 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false,
557 fInitialQueueFamily);
Greg Danielbae71212019-03-01 15:24:35 -0500558}
559
// Returns the image to its initial queue family (layout unchanged) so an
// external client can use it.
void GrVkImage::prepareForExternal(GrVkGpu* gpu) {
    this->setImageLayoutAndQueueIndex(gpu, this->currentLayout(), 0,
                                      VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false,
                                      fInitialQueueFamily);
}
565
// Drops all Vulkan-side objects held by this image: the backing resource
// (unref'd, which may destroy the VkImage/memory for owned images), both
// image views, and any cached input-attachment descriptor sets.
void GrVkImage::releaseImage() {
    if (fResource) {
        fResource->unref();
        fResource = nullptr;
    }
    fFramebufferView.reset();
    fTextureView.reset();
    fCachedBlendingInputDescSet.reset();
    fCachedMSAALoadInputDescSet.reset();
}
576
// Cache callback for orderly release: free Vulkan objects, then defer to the
// base class.
void GrVkImage::onRelease() {
    this->releaseImage();
    GrAttachment::onRelease();
}
581
// Cache callback when the context is abandoned: same teardown as onRelease,
// then defer to the base class.
void GrVkImage::onAbandon() {
    this->releaseImage();
    GrAttachment::onAbandon();
}
586
// Installs a callback to run when the underlying Vulkan resource is freed.
void GrVkImage::setResourceRelease(sk_sp<GrRefCntedCallback> releaseHelper) {
    SkASSERT(fResource);
    // Forward the release proc on to GrVkImage::Resource
    fResource->setRelease(std::move(releaseHelper));
}
592
// Owned resource: run any release proc, then destroy the VkImage and free its
// memory.
void GrVkImage::Resource::freeGPUData() const {
    this->invokeReleaseProc();
    VK_CALL(fGpu, DestroyImage(fGpu->device(), fImage, nullptr));
    GrVkMemory::FreeImageMemory(fGpu, fAlloc);
}
jvanverthfe170d22016-03-22 13:15:44 -0700598
// Borrowed resource: only run the release proc; the VkImage and its memory
// belong to the external owner and are not destroyed here.
void GrVkImage::BorrowedResource::freeGPUData() const {
    this->invokeReleaseProc();
}
Greg Danielcef213c2017-04-21 11:52:27 -0400602
Greg Daniel2bc96d62021-09-13 13:08:02 -0400603static void write_input_desc_set(GrVkGpu* gpu,
604 VkImageView view,
605 VkImageLayout layout,
606 VkDescriptorSet descSet) {
607 VkDescriptorImageInfo imageInfo;
608 memset(&imageInfo, 0, sizeof(VkDescriptorImageInfo));
609 imageInfo.sampler = VK_NULL_HANDLE;
610 imageInfo.imageView = view;
611 imageInfo.imageLayout = layout;
612
613 VkWriteDescriptorSet writeInfo;
614 memset(&writeInfo, 0, sizeof(VkWriteDescriptorSet));
615 writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
616 writeInfo.pNext = nullptr;
617 writeInfo.dstSet = descSet;
618 writeInfo.dstBinding = GrVkUniformHandler::kInputBinding;
619 writeInfo.dstArrayElement = 0;
620 writeInfo.descriptorCount = 1;
621 writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
622 writeInfo.pImageInfo = &imageInfo;
623 writeInfo.pBufferInfo = nullptr;
624 writeInfo.pTexelBufferView = nullptr;
625
626 GR_VK_CALL(gpu->vkInterface(), UpdateDescriptorSets(gpu->device(), 1, &writeInfo, 0, nullptr));
627}
628
629gr_rp<const GrVkDescriptorSet> GrVkImage::inputDescSetForBlending(GrVkGpu* gpu) {
630 if (!this->supportsInputAttachmentUsage()) {
631 return nullptr;
632 }
633 if (fCachedBlendingInputDescSet) {
634 return fCachedBlendingInputDescSet;
635 }
636
637 fCachedBlendingInputDescSet.reset(gpu->resourceProvider().getInputDescriptorSet());
638 if (!fCachedBlendingInputDescSet) {
639 return nullptr;
640 }
641
642 write_input_desc_set(gpu,
643 this->framebufferView()->imageView(),
644 VK_IMAGE_LAYOUT_GENERAL,
645 *fCachedBlendingInputDescSet->descriptorSet());
646
647 return fCachedBlendingInputDescSet;
648}
649
650gr_rp<const GrVkDescriptorSet> GrVkImage::inputDescSetForMSAALoad(GrVkGpu* gpu) {
651 if (!this->supportsInputAttachmentUsage()) {
652 return nullptr;
653 }
654 if (fCachedMSAALoadInputDescSet) {
655 return fCachedMSAALoadInputDescSet;
656 }
657
658 fCachedMSAALoadInputDescSet.reset(gpu->resourceProvider().getInputDescriptorSet());
659 if (!fCachedMSAALoadInputDescSet) {
660 return nullptr;
661 }
662
663 write_input_desc_set(gpu,
664 this->framebufferView()->imageView(),
665 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
666 *fCachedMSAALoadInputDescSet->descriptorSet());
667
668 return fCachedMSAALoadInputDescSet;
669}
670
// Returns the owning GrGpu downcast to the Vulkan backend type. Only valid
// while the resource has not been destroyed.
GrVkGpu* GrVkImage::getVkGpu() const {
    SkASSERT(!this->wasDestroyed());
    return static_cast<GrVkGpu*>(this->getGpu());
}
675
#if GR_TEST_UTILS
// Test-only helper: force the tracked queue family back to the gpu's graphics
// queue without recording a barrier.
void GrVkImage::setCurrentQueueFamilyToGraphicsQueue(GrVkGpu* gpu) {
    fMutableState->setQueueFamilyIndex(gpu->queueIndex());
}
#endif
681