blob: 6a26410fd9fa5598063b281b808303f2c218f590 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkImage.h"
Greg Daniel2bc96d62021-09-13 13:08:02 -04009
10#include "src/gpu/vk/GrVkGpu.h"
11#include "src/gpu/vk/GrVkImageView.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "src/gpu/vk/GrVkMemory.h"
13#include "src/gpu/vk/GrVkTexture.h"
14#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050015
// Invokes a Vulkan entry point X through the given GPU's function-pointer table.
#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)
17
Greg Daniel2bc96d62021-09-13 13:08:02 -040018sk_sp<GrVkImage> GrVkImage::MakeStencil(GrVkGpu* gpu,
19 SkISize dimensions,
20 int sampleCnt,
21 VkFormat format) {
22 VkImageUsageFlags vkUsageFlags =
23 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
24 return GrVkImage::Make(gpu,
25 dimensions,
26 UsageFlags::kStencilAttachment,
27 sampleCnt,
28 format,
29 /*mipLevels=*/1,
30 vkUsageFlags,
31 GrProtected::kNo,
32 SkBudgeted::kYes);
33}
34
35sk_sp<GrVkImage> GrVkImage::MakeMSAA(GrVkGpu* gpu,
36 SkISize dimensions,
37 int numSamples,
38 VkFormat format,
39 GrProtected isProtected) {
40 SkASSERT(numSamples > 1);
41
42 VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
43 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
44 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
45 return GrVkImage::Make(gpu,
46 dimensions,
47 UsageFlags::kColorAttachment,
48 numSamples,
49 format,
50 /*mipLevels=*/1,
51 vkUsageFlags,
52 isProtected,
53 SkBudgeted::kYes);
54}
55
56sk_sp<GrVkImage> GrVkImage::MakeTexture(GrVkGpu* gpu,
57 SkISize dimensions,
58 VkFormat format,
59 uint32_t mipLevels,
60 GrRenderable renderable,
61 int numSamples,
62 SkBudgeted budgeted,
63 GrProtected isProtected) {
64 UsageFlags usageFlags = UsageFlags::kTexture;
65 VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
66 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
67 if (renderable == GrRenderable::kYes) {
68 usageFlags |= UsageFlags::kColorAttachment;
69 vkUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
70 // We always make our render targets support being used as input attachments
71 vkUsageFlags |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
72 }
73
74 return GrVkImage::Make(gpu,
75 dimensions,
76 usageFlags,
77 numSamples,
78 format,
79 mipLevels,
80 vkUsageFlags,
81 isProtected,
82 budgeted);
83}
84
85static bool make_views(GrVkGpu* gpu,
86 const GrVkImageInfo& info,
87 GrAttachment::UsageFlags attachmentUsages,
88 sk_sp<const GrVkImageView>* framebufferView,
89 sk_sp<const GrVkImageView>* textureView) {
90 GrVkImageView::Type viewType;
91 if (attachmentUsages & GrAttachment::UsageFlags::kStencilAttachment) {
92 // If we have stencil usage then we shouldn't have any other usages
93 SkASSERT(attachmentUsages == GrAttachment::UsageFlags::kStencilAttachment);
94 viewType = GrVkImageView::kStencil_Type;
95 } else {
96 viewType = GrVkImageView::kColor_Type;
97 }
98
99 if (SkToBool(attachmentUsages & GrAttachment::UsageFlags::kStencilAttachment) ||
100 SkToBool(attachmentUsages & GrAttachment::UsageFlags::kColorAttachment)) {
101 // Attachments can only have a mip level of 1
102 *framebufferView = GrVkImageView::Make(
103 gpu, info.fImage, info.fFormat, viewType, 1, info.fYcbcrConversionInfo);
104 if (!*framebufferView) {
105 return false;
106 }
107 }
108
109 if (attachmentUsages & GrAttachment::UsageFlags::kTexture) {
110 *textureView = GrVkImageView::Make(gpu,
111 info.fImage,
112 info.fFormat,
113 viewType,
114 info.fLevelCount,
115 info.fYcbcrConversionInfo);
116 if (!*textureView) {
117 return false;
118 }
119 }
120 return true;
121}
122
// Central factory: creates the VkImage + memory, builds the required views,
// and wraps everything in a Skia-owned GrVkImage. Returns nullptr on failure.
// On view-creation failure the freshly created image/memory is destroyed so
// nothing leaks.
sk_sp<GrVkImage> GrVkImage::Make(GrVkGpu* gpu,
                                 SkISize dimensions,
                                 UsageFlags attachmentUsages,
                                 int sampleCnt,
                                 VkFormat format,
                                 uint32_t mipLevels,
                                 VkImageUsageFlags vkUsageFlags,
                                 GrProtected isProtected,
                                 SkBudgeted budgeted) {
    // Describe the image to create: always 2D, optimally tiled.
    GrVkImage::ImageDesc imageDesc;
    imageDesc.fImageType = VK_IMAGE_TYPE_2D;
    imageDesc.fFormat = format;
    imageDesc.fWidth = dimensions.width();
    imageDesc.fHeight = dimensions.height();
    imageDesc.fLevels = mipLevels;
    imageDesc.fSamples = sampleCnt;
    imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    imageDesc.fUsageFlags = vkUsageFlags;
    imageDesc.fIsProtected = isProtected;

    GrVkImageInfo info;
    if (!GrVkImage::InitImageInfo(gpu, imageDesc, &info)) {
        return nullptr;
    }

    sk_sp<const GrVkImageView> framebufferView;
    sk_sp<const GrVkImageView> textureView;
    if (!make_views(gpu, info, attachmentUsages, &framebufferView, &textureView)) {
        // Undo the image/memory creation from InitImageInfo above.
        GrVkImage::DestroyImageInfo(gpu, &info);
        return nullptr;
    }

    // Seed the shared mutable state (layout + queue family) from the new image.
    sk_sp<GrBackendSurfaceMutableStateImpl> mutableState(
            new GrBackendSurfaceMutableStateImpl(info.fImageLayout, info.fCurrentQueueFamily));
    return sk_sp<GrVkImage>(new GrVkImage(gpu,
                                          dimensions,
                                          attachmentUsages,
                                          info,
                                          std::move(mutableState),
                                          std::move(framebufferView),
                                          std::move(textureView),
                                          budgeted));
}
166
// Wraps an externally created VkImage in a GrVkImage. When wrapping for a
// secondary command buffer no views are created (the image is only referenced,
// never sampled/attached through our own views).
sk_sp<GrVkImage> GrVkImage::MakeWrapped(GrVkGpu* gpu,
                                        SkISize dimensions,
                                        const GrVkImageInfo& info,
                                        sk_sp<GrBackendSurfaceMutableStateImpl> mutableState,
                                        UsageFlags attachmentUsages,
                                        GrWrapOwnership ownership,
                                        GrWrapCacheable cacheable,
                                        bool forSecondaryCB) {
    sk_sp<const GrVkImageView> framebufferView;
    sk_sp<const GrVkImageView> textureView;
    if (!forSecondaryCB) {
        if (!make_views(gpu, info, attachmentUsages, &framebufferView, &textureView)) {
            return nullptr;
        }
    }

    // Map the client-facing wrap ownership onto the backend ownership enum.
    GrBackendObjectOwnership backendOwnership = kBorrow_GrWrapOwnership == ownership
            ? GrBackendObjectOwnership::kBorrowed
            : GrBackendObjectOwnership::kOwned;

    return sk_sp<GrVkImage>(new GrVkImage(gpu,
                                          dimensions,
                                          attachmentUsages,
                                          info,
                                          std::move(mutableState),
                                          std::move(framebufferView),
                                          std::move(textureView),
                                          backendOwnership,
                                          cacheable,
                                          forSecondaryCB));
}
198
// Constructor for images created and owned by Skia (never borrowed); registers
// the attachment with the resource cache under the given budgeted state.
GrVkImage::GrVkImage(GrVkGpu* gpu,
                     SkISize dimensions,
                     UsageFlags supportedUsages,
                     const GrVkImageInfo& info,
                     sk_sp<GrBackendSurfaceMutableStateImpl> mutableState,
                     sk_sp<const GrVkImageView> framebufferView,
                     sk_sp<const GrVkImageView> textureView,
                     SkBudgeted budgeted)
        : GrAttachment(gpu,
                       dimensions,
                       supportedUsages,
                       info.fSampleCount,
                       info.fLevelCount > 1 ? GrMipmapped::kYes : GrMipmapped::kNo,
                       info.fProtected)
        , fInfo(info)
        , fInitialQueueFamily(info.fCurrentQueueFamily)
        , fMutableState(std::move(mutableState))
        , fFramebufferView(std::move(framebufferView))
        , fTextureView(std::move(textureView))
        , fIsBorrowed(false) {
    // Skia-owned images are never created for secondary command buffers.
    this->init(gpu, false);
    this->registerWithCache(budgeted);
}
222
// Constructor for wrapped (externally created) images; ownership of the
// underlying VkImage is determined by the caller, and the object is registered
// with the cache as a wrapped resource.
GrVkImage::GrVkImage(GrVkGpu* gpu,
                     SkISize dimensions,
                     UsageFlags supportedUsages,
                     const GrVkImageInfo& info,
                     sk_sp<GrBackendSurfaceMutableStateImpl> mutableState,
                     sk_sp<const GrVkImageView> framebufferView,
                     sk_sp<const GrVkImageView> textureView,
                     GrBackendObjectOwnership ownership,
                     GrWrapCacheable cacheable,
                     bool forSecondaryCB)
        : GrAttachment(gpu,
                       dimensions,
                       supportedUsages,
                       info.fSampleCount,
                       info.fLevelCount > 1 ? GrMipmapped::kYes : GrMipmapped::kNo,
                       info.fProtected)
        , fInfo(info)
        , fInitialQueueFamily(info.fCurrentQueueFamily)
        , fMutableState(std::move(mutableState))
        , fFramebufferView(std::move(framebufferView))
        , fTextureView(std::move(textureView))
        , fIsBorrowed(GrBackendObjectOwnership::kBorrowed == ownership) {
    this->init(gpu, forSecondaryCB);
    this->registerWithCacheWrapped(cacheable);
}
248
// Shared constructor tail: validates the image info in debug builds and sets up
// fResource, which manages the lifetime of the VkImage/memory (or is null for
// secondary-command-buffer wrapping where we don't track the resource at all).
void GrVkImage::init(GrVkGpu* gpu, bool forSecondaryCB) {
    // The mutable state must start in sync with the static info snapshot.
    SkASSERT(fMutableState->getImageLayout() == fInfo.fImageLayout);
    SkASSERT(fMutableState->getQueueFamilyIndex() == fInfo.fCurrentQueueFamily);
#ifdef SK_DEBUG
    // All images must support transfer-dst; non-depth/stencil images must also
    // support transfer-src.
    if (fInfo.fImageUsageFlags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
        SkASSERT(SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT));
    } else {
        SkASSERT(SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT) &&
                 SkToBool(fInfo.fImageUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT));
    }
    // We can't transfer from the non graphics queue to the graphics queue since we can't
    // release the image from the original queue without having that queue. This limits us in terms
    // of the types of queue indices we can handle.
    if (fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_IGNORED &&
        fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_EXTERNAL &&
        fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_FOREIGN_EXT) {
        if (fInfo.fSharingMode == VK_SHARING_MODE_EXCLUSIVE) {
            if (fInfo.fCurrentQueueFamily != gpu->queueIndex()) {
                SkASSERT(false);
            }
        } else {
            SkASSERT(false);
        }
    }
#endif
    if (forSecondaryCB) {
        // Secondary command buffers reference the image but don't own a resource.
        fResource = nullptr;
    } else if (fIsBorrowed) {
        // Borrowed: we must not destroy the VkImage/memory when the resource dies.
        fResource = new BorrowedResource(gpu, fInfo.fImage, fInfo.fAlloc, fInfo.fImageTiling);
    } else {
        // Owned: the image must have backing memory we allocated.
        SkASSERT(VK_NULL_HANDLE != fInfo.fAlloc.fMemory);
        fResource = new Resource(gpu, fInfo.fImage, fInfo.fAlloc, fInfo.fImageTiling);
    }
}
283
Greg Danielf7828d02018-10-09 12:01:32 -0400284VkPipelineStageFlags GrVkImage::LayoutToPipelineSrcStageFlags(const VkImageLayout layout) {
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400285 if (VK_IMAGE_LAYOUT_GENERAL == layout) {
286 return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
287 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
288 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
289 return VK_PIPELINE_STAGE_TRANSFER_BIT;
Greg Danielf7828d02018-10-09 12:01:32 -0400290 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
291 return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
292 } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout ||
293 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout) {
294 return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
295 } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
296 return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400297 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
298 return VK_PIPELINE_STAGE_HOST_BIT;
Ethan Nicholas03896ae2019-03-07 16:54:24 -0500299 } else if (VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) {
Greg Danielcb324152019-02-25 11:36:53 -0500300 return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400301 }
302
303 SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout);
304 return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
305}
306
307VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) {
308 // Currently we assume we will never being doing any explict shader writes (this doesn't include
309 // color attachment or depth/stencil writes). So we will ignore the
310 // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT.
311
312 // We can only directly access the host memory if we are in preinitialized or general layout,
313 // and the image is linear.
314 // TODO: Add check for linear here so we are not always adding host to general, and we should
315 // only be in preinitialized if we are linear
Brian Salomon23356442018-11-30 15:33:19 -0500316 VkAccessFlags flags = 0;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400317 if (VK_IMAGE_LAYOUT_GENERAL == layout) {
318 flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
319 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
320 VK_ACCESS_TRANSFER_WRITE_BIT |
Greg Danielb509bbb2020-10-02 13:30:41 -0400321 VK_ACCESS_HOST_WRITE_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400322 } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
323 flags = VK_ACCESS_HOST_WRITE_BIT;
324 } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
Greg Danielb509bbb2020-10-02 13:30:41 -0400325 flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400326 } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) {
327 flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
328 } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
329 flags = VK_ACCESS_TRANSFER_WRITE_BIT;
Greg Danielab79ff92019-03-29 09:48:05 -0400330 } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
331 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout ||
332 VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) {
333 // There are no writes that need to be made available
Greg Danielcb324152019-02-25 11:36:53 -0500334 flags = 0;
Greg Daniel6ddbafc2018-05-24 12:34:29 -0400335 }
336 return flags;
337}
338
egdaniel58a8d922016-04-21 08:03:10 -0700339VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) {
340 switch (format) {
341 case VK_FORMAT_S8_UINT:
342 return VK_IMAGE_ASPECT_STENCIL_BIT;
343 case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough
344 case VK_FORMAT_D32_SFLOAT_S8_UINT:
345 return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
346 default:
egdaniel58a8d922016-04-21 08:03:10 -0700347 return VK_IMAGE_ASPECT_COLOR_BIT;
348 }
349}
350
// Records an image memory barrier that transitions this image to newLayout
// and/or transfers queue-family ownership, then updates the tracked state.
// Skips the barrier entirely when nothing would change and the layout is
// read-only.
void GrVkImage::setImageLayoutAndQueueIndex(const GrVkGpu* gpu,
                                            VkImageLayout newLayout,
                                            VkAccessFlags dstAccessMask,
                                            VkPipelineStageFlags dstStageMask,
                                            bool byRegion,
                                            uint32_t newQueueFamilyIndex) {
    SkASSERT(!gpu->isDeviceLost());
    // We never transition *into* UNDEFINED/PREINITIALIZED (only out of them).
    SkASSERT(newLayout == this->currentLayout() ||
             (VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
              VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout));
    VkImageLayout currentLayout = this->currentLayout();
    uint32_t currentQueueIndex = this->currentQueueFamilyIndex();

#ifdef SK_DEBUG
    // Validate which queue-family transfers are legal for the sharing mode:
    // concurrent images only acquire from / release to external-type families;
    // exclusive images may additionally move to/from our graphics queue.
    if (fInfo.fSharingMode == VK_SHARING_MODE_CONCURRENT) {
        if (newQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) {
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED ||
                     currentQueueIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                     currentQueueIndex == VK_QUEUE_FAMILY_FOREIGN_EXT);
        } else {
            SkASSERT(newQueueFamilyIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                     newQueueFamilyIndex == VK_QUEUE_FAMILY_FOREIGN_EXT);
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED);
        }
    } else {
        SkASSERT(fInfo.fSharingMode == VK_SHARING_MODE_EXCLUSIVE);
        if (newQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED ||
            currentQueueIndex == gpu->queueIndex()) {
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED ||
                     currentQueueIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                     currentQueueIndex == VK_QUEUE_FAMILY_FOREIGN_EXT ||
                     currentQueueIndex == gpu->queueIndex());
        } else if (newQueueFamilyIndex == VK_QUEUE_FAMILY_EXTERNAL ||
                   newQueueFamilyIndex == VK_QUEUE_FAMILY_FOREIGN_EXT) {
            SkASSERT(currentQueueIndex == VK_QUEUE_FAMILY_IGNORED ||
                     currentQueueIndex == gpu->queueIndex());
        }
    }
#endif

    // For exclusive images, IGNORED means "our graphics queue"; normalize both
    // ends so the barrier below carries concrete indices.
    if (fInfo.fSharingMode == VK_SHARING_MODE_EXCLUSIVE) {
        if (newQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) {
            newQueueFamilyIndex = gpu->queueIndex();
        }
        if (currentQueueIndex == VK_QUEUE_FAMILY_IGNORED) {
            currentQueueIndex = gpu->queueIndex();
        }
    }

    // If the old and new layout are the same and the layout is a read only layout, there is no need
    // to put in a barrier unless we also need to switch queues.
    if (newLayout == currentLayout && currentQueueIndex == newQueueFamilyIndex &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    // Derive the source half of the barrier from the layout we're leaving.
    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineSrcStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);

    // The barrier covers every mip level (and the single array layer).
    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,          // sType
        nullptr,                                         // pNext
        srcAccessMask,                                   // srcAccessMask
        dstAccessMask,                                   // dstAccessMask
        currentLayout,                                   // oldLayout
        newLayout,                                       // newLayout
        currentQueueIndex,                               // srcQueueFamilyIndex
        newQueueFamilyIndex,                             // dstQueueFamilyIndex
        fInfo.fImage,                                    // image
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 }      // subresourceRange
    };
    SkASSERT(srcAccessMask == imageMemoryBarrier.srcAccessMask);
    gpu->addImageMemoryBarrier(this->resource(), srcStageMask, dstStageMask, byRegion,
                               &imageMemoryBarrier);

    // Keep the tracked mutable state in sync with what the barrier produces.
    this->updateImageLayout(newLayout);
    this->setQueueFamilyIndex(newQueueFamilyIndex);
}
433
// Creates a VkImage matching imageDesc, allocates and binds its memory, and
// fills in *info. Returns false on any failure; on memory-allocation failure
// the already-created image is destroyed first.
bool GrVkImage::InitImageInfo(GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) {
    if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) {
        return false;
    }
    // Protected images require device support for protected memory.
    if ((imageDesc.fIsProtected == GrProtected::kYes) && !gpu->vkCaps().supportsProtectedMemory()) {
        return false;
    }
    VkImage image = VK_NULL_HANDLE;
    GrVkAlloc alloc;

    // Linear images start PREINITIALIZED (host-writable); optimal ones UNDEFINED.
    bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling;
    VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED
                                           : VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) {
        return false;
    }

    // Multisampling is only supported with optimal tiling.
    SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling ||
             VK_SAMPLE_COUNT_1_BIT == vkSamples);

    VkImageCreateFlags createflags = 0;
    if (imageDesc.fIsProtected == GrProtected::kYes || gpu->protectedContext()) {
        createflags |= VK_IMAGE_CREATE_PROTECTED_BIT;
    }
    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        nullptr,                                     // pNext
        createflags,                                 // VkImageCreateFlags
        imageDesc.fImageType,                        // VkImageType
        imageDesc.fFormat,                           // VkFormat
        { imageDesc.fWidth, imageDesc.fHeight, 1 },  // VkExtent3D
        imageDesc.fLevels,                           // mipLevels
        1,                                           // arrayLayers
        vkSamples,                                   // samples
        imageDesc.fImageTiling,                      // VkImageTiling
        imageDesc.fUsageFlags,                       // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        nullptr,                                     // pQueueFamilyIndices
        initialLayout                                // initialLayout
    };

    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, CreateImage(gpu->device(), &imageCreateInfo, nullptr, &image));
    if (result != VK_SUCCESS) {
        return false;
    }

    if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, &alloc)) {
        // Don't leak the image if we can't back it with memory.
        VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr));
        return false;
    }

    // Snapshot everything callers need to track/use the new image.
    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = imageDesc.fImageTiling;
    info->fImageLayout = initialLayout;
    info->fFormat = imageDesc.fFormat;
    info->fImageUsageFlags = imageDesc.fUsageFlags;
    info->fSampleCount = imageDesc.fSamples;
    info->fLevelCount = imageDesc.fLevels;
    info->fCurrentQueueFamily = VK_QUEUE_FAMILY_IGNORED;
    info->fProtected =
            (createflags & VK_IMAGE_CREATE_PROTECTED_BIT) ? GrProtected::kYes : GrProtected::kNo;
    info->fSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    return true;
}
Greg Daniel164a9f02016-02-22 09:56:40 -0500504
// Destroys the VkImage and frees its backing memory; counterpart to
// InitImageInfo.
void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) {
    VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr));
    GrVkMemory::FreeImageMemory(gpu, info->fAlloc);
}
509
GrVkImage::~GrVkImage() {
    // should have been released first via onRelease()/onAbandon()
    SkASSERT(!fResource);
    SkASSERT(!fFramebufferView);
    SkASSERT(!fTextureView);
}
516
Greg Danielbae71212019-03-01 15:24:35 -0500517void GrVkImage::prepareForPresent(GrVkGpu* gpu) {
518 VkImageLayout layout = this->currentLayout();
519 if (fInitialQueueFamily != VK_QUEUE_FAMILY_EXTERNAL &&
520 fInitialQueueFamily != VK_QUEUE_FAMILY_FOREIGN_EXT) {
521 if (gpu->vkCaps().supportsSwapchain()) {
522 layout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
523 }
524 }
Greg Daniel7f3408b2020-06-03 13:31:00 -0400525 this->setImageLayoutAndQueueIndex(gpu, layout, 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false,
526 fInitialQueueFamily);
Greg Danielbae71212019-03-01 15:24:35 -0500527}
528
// Releases the image back to its original queue family (layout unchanged) so
// an external client can take ownership.
void GrVkImage::prepareForExternal(GrVkGpu* gpu) {
    this->setImageLayoutAndQueueIndex(gpu, this->currentLayout(), 0,
                                      VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false,
                                      fInitialQueueFamily);
}
534
// Drops all Vulkan-side holdings: the image resource (unref'd, freed when the
// GPU is done with it), both views, and the cached input descriptor sets.
void GrVkImage::releaseImage() {
    if (fResource) {
        fResource->unref();
        fResource = nullptr;
    }
    fFramebufferView.reset();
    fTextureView.reset();
    fCachedBlendingInputDescSet.reset();
    fCachedMSAALoadInputDescSet.reset();
}
545
void GrVkImage::onRelease() {
    // Release our Vulkan objects, then let the base attachment release.
    this->releaseImage();
    GrAttachment::onRelease();
}
550
void GrVkImage::onAbandon() {
    // Abandon uses the same teardown path as release for the Vulkan objects.
    this->releaseImage();
    GrAttachment::onAbandon();
}
555
// Installs a callback to be invoked when the underlying resource is freed.
void GrVkImage::setResourceRelease(sk_sp<GrRefCntedCallback> releaseHelper) {
    SkASSERT(fResource);
    // Forward the release proc on to GrVkImage::Resource
    fResource->setRelease(std::move(releaseHelper));
}
561
// Owned resource: run any release callback, then destroy the image and free
// its memory.
void GrVkImage::Resource::freeGPUData() const {
    this->invokeReleaseProc();
    VK_CALL(fGpu, DestroyImage(fGpu->device(), fImage, nullptr));
    GrVkMemory::FreeImageMemory(fGpu, fAlloc);
}
jvanverthfe170d22016-03-22 13:15:44 -0700567
// Borrowed resource: we don't own the image/memory, so only run the callback.
void GrVkImage::BorrowedResource::freeGPUData() const {
    this->invokeReleaseProc();
}
Greg Danielcef213c2017-04-21 11:52:27 -0400571
Greg Daniel2bc96d62021-09-13 13:08:02 -0400572static void write_input_desc_set(GrVkGpu* gpu,
573 VkImageView view,
574 VkImageLayout layout,
575 VkDescriptorSet descSet) {
576 VkDescriptorImageInfo imageInfo;
577 memset(&imageInfo, 0, sizeof(VkDescriptorImageInfo));
578 imageInfo.sampler = VK_NULL_HANDLE;
579 imageInfo.imageView = view;
580 imageInfo.imageLayout = layout;
581
582 VkWriteDescriptorSet writeInfo;
583 memset(&writeInfo, 0, sizeof(VkWriteDescriptorSet));
584 writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
585 writeInfo.pNext = nullptr;
586 writeInfo.dstSet = descSet;
587 writeInfo.dstBinding = GrVkUniformHandler::kInputBinding;
588 writeInfo.dstArrayElement = 0;
589 writeInfo.descriptorCount = 1;
590 writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
591 writeInfo.pImageInfo = &imageInfo;
592 writeInfo.pBufferInfo = nullptr;
593 writeInfo.pTexelBufferView = nullptr;
594
595 GR_VK_CALL(gpu->vkInterface(), UpdateDescriptorSets(gpu->device(), 1, &writeInfo, 0, nullptr));
596}
597
598gr_rp<const GrVkDescriptorSet> GrVkImage::inputDescSetForBlending(GrVkGpu* gpu) {
599 if (!this->supportsInputAttachmentUsage()) {
600 return nullptr;
601 }
602 if (fCachedBlendingInputDescSet) {
603 return fCachedBlendingInputDescSet;
604 }
605
606 fCachedBlendingInputDescSet.reset(gpu->resourceProvider().getInputDescriptorSet());
607 if (!fCachedBlendingInputDescSet) {
608 return nullptr;
609 }
610
611 write_input_desc_set(gpu,
612 this->framebufferView()->imageView(),
613 VK_IMAGE_LAYOUT_GENERAL,
614 *fCachedBlendingInputDescSet->descriptorSet());
615
616 return fCachedBlendingInputDescSet;
617}
618
619gr_rp<const GrVkDescriptorSet> GrVkImage::inputDescSetForMSAALoad(GrVkGpu* gpu) {
620 if (!this->supportsInputAttachmentUsage()) {
621 return nullptr;
622 }
623 if (fCachedMSAALoadInputDescSet) {
624 return fCachedMSAALoadInputDescSet;
625 }
626
627 fCachedMSAALoadInputDescSet.reset(gpu->resourceProvider().getInputDescriptorSet());
628 if (!fCachedMSAALoadInputDescSet) {
629 return nullptr;
630 }
631
632 write_input_desc_set(gpu,
633 this->framebufferView()->imageView(),
634 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
635 *fCachedMSAALoadInputDescSet->descriptorSet());
636
637 return fCachedMSAALoadInputDescSet;
638}
639
640GrVkGpu* GrVkImage::getVkGpu() const {
641 SkASSERT(!this->wasDestroyed());
642 return static_cast<GrVkGpu*>(this->getGpu());
643}
644
#if GR_TEST_UTILS
// Test-only helper: force the tracked queue family back to the graphics queue
// without emitting a barrier.
void GrVkImage::setCurrentQueueFamilyToGraphicsQueue(GrVkGpu* gpu) {
    fMutableState->setQueueFamilyIndex(gpu->queueIndex());
}
#endif
650