/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkImage.h"
#include "src/gpu/vk/GrVkMemory.h"
#include "src/gpu/vk/GrVkTexture.h"
#include "src/gpu/vk/GrVkUtil.h"

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

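// Given the layout an image is currently in, returns the pipeline stage(s) that could have last
// written to it in that layout. Used as the srcStageMask for barriers transitioning out of that
// layout.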
VkPipelineStageFlags GrVkImage::LayoutToPipelineSrcStageFlags(const VkImageLayout layout) {
    if (VK_IMAGE_LAYOUT_GENERAL == layout) {
        return VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_TRANSFER_BIT;
    } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
    } else if (VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout) {
        return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
        return VK_PIPELINE_STAGE_HOST_BIT;
    } else if (VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) {
        return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    }

    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED == layout);
    return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
}

VkAccessFlags GrVkImage::LayoutToSrcAccessMask(const VkImageLayout layout) {
    // Currently we assume we will never be doing any explicit shader writes (this doesn't include
    // color attachment or depth/stencil writes). So we will ignore the
    // VK_MEMORY_OUTPUT_SHADER_WRITE_BIT.

    // We can only directly access the host memory if we are in preinitialized or general layout,
    // and the image is linear.
    // TODO: Add a check for linear here so we are not always adding host to general, and we should
    // only be in preinitialized if we are linear.
    VkAccessFlags flags = 0;
    if (VK_IMAGE_LAYOUT_GENERAL == layout) {
        flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                VK_ACCESS_TRANSFER_WRITE_BIT |
                VK_ACCESS_TRANSFER_READ_BIT |
                VK_ACCESS_SHADER_READ_BIT |
                VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_HOST_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_PREINITIALIZED == layout) {
        flags = VK_ACCESS_HOST_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL == layout) {
        flags = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
    } else if (VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL == layout) {
        flags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == layout) {
        flags = VK_ACCESS_TRANSFER_WRITE_BIT;
    } else if (VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == layout ||
               VK_IMAGE_LAYOUT_PRESENT_SRC_KHR == layout) {
        // There are no writes that need to be made available for these read-only layouts.
        flags = 0;
    }
    return flags;
}

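// Picks the aspect bits that match a format: stencil-only formats get the stencil aspect,
// combined depth/stencil formats get both, and everything else is treated as color.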
VkImageAspectFlags vk_format_to_aspect_flags(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
            return VK_IMAGE_ASPECT_STENCIL_BIT;
        case VK_FORMAT_D24_UNORM_S8_UINT: // fallthrough
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
        default:
            SkASSERT(GrVkFormatIsSupported(format));
            return VK_IMAGE_ASPECT_COLOR_BIT;
    }
}

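// Records a barrier that transitions this image to newLayout and, when needed, transfers queue
// family ownership. Source access/stage masks are derived from the current layout; the caller
// supplies the destination masks. A hypothetical call site, transitioning an image so a transfer
// can write into it, might look like:
//
//   image->setImageLayout(gpu, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
//                         VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
//                         /*byRegion=*/false, /*releaseFamilyQueue=*/false);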
void GrVkImage::setImageLayout(const GrVkGpu* gpu, VkImageLayout newLayout,
                               VkAccessFlags dstAccessMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion, bool releaseFamilyQueue) {
    SkASSERT(VK_IMAGE_LAYOUT_UNDEFINED != newLayout &&
             VK_IMAGE_LAYOUT_PREINITIALIZED != newLayout);
    VkImageLayout currentLayout = this->currentLayout();

    if (releaseFamilyQueue && fInfo.fCurrentQueueFamily == fInitialQueueFamily &&
        newLayout == currentLayout) {
        // We never transferred the image to this queue and we are releasing it, so don't do
        // anything.
        return;
    }

    // If the old and new layout are the same and the layout is a read-only layout, there is no
    // need to put in a barrier unless we also need to switch queues.
    if (newLayout == currentLayout && !releaseFamilyQueue &&
        (fInfo.fCurrentQueueFamily == VK_QUEUE_FAMILY_IGNORED ||
         fInfo.fCurrentQueueFamily == gpu->queueIndex()) &&
        (VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == currentLayout ||
         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == currentLayout)) {
        return;
    }

    VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(currentLayout);
    VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineSrcStageFlags(currentLayout);

    VkImageAspectFlags aspectFlags = vk_format_to_aspect_flags(fInfo.fFormat);

    uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    if (fInfo.fCurrentQueueFamily != VK_QUEUE_FAMILY_IGNORED &&
        gpu->queueIndex() != fInfo.fCurrentQueueFamily) {
        // The image is still owned by its original queue family and we need to transfer it into
        // ours.
        SkASSERT(!releaseFamilyQueue);
        SkASSERT(fInfo.fCurrentQueueFamily == fInitialQueueFamily);

        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = gpu->queueIndex();
        fInfo.fCurrentQueueFamily = gpu->queueIndex();
    } else if (releaseFamilyQueue) {
        // We are releasing the image so we must transfer the image back to its original queue
        // family.
        srcQueueFamilyIndex = fInfo.fCurrentQueueFamily;
        dstQueueFamilyIndex = fInitialQueueFamily;
        fInfo.fCurrentQueueFamily = fInitialQueueFamily;
    }

    VkImageMemoryBarrier imageMemoryBarrier = {
        VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,      // sType
        nullptr,                                     // pNext
        srcAccessMask,                               // srcAccessMask
        dstAccessMask,                               // dstAccessMask
        currentLayout,                               // oldLayout
        newLayout,                                   // newLayout
        srcQueueFamilyIndex,                         // srcQueueFamilyIndex
        dstQueueFamilyIndex,                         // dstQueueFamilyIndex
        fInfo.fImage,                                // image
        { aspectFlags, 0, fInfo.fLevelCount, 0, 1 }  // subresourceRange
    };

    gpu->addImageMemoryBarrier(this->resource(), srcStageMask, dstStageMask, byRegion,
                               &imageMemoryBarrier);

    this->updateImageLayout(newLayout);
}

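// Creates a VkImage described by imageDesc, allocates and binds memory for it, and fills out
// info. Returns false on any failure, leaving nothing for the caller to clean up.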
bool GrVkImage::InitImageInfo(const GrVkGpu* gpu, const ImageDesc& imageDesc, GrVkImageInfo* info) {
    if (0 == imageDesc.fWidth || 0 == imageDesc.fHeight) {
        return false;
    }
    VkImage image = VK_NULL_HANDLE;
    GrVkAlloc alloc;

    bool isLinear = VK_IMAGE_TILING_LINEAR == imageDesc.fImageTiling;
    VkImageLayout initialLayout = isLinear ? VK_IMAGE_LAYOUT_PREINITIALIZED
                                           : VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(imageDesc.fSamples, &vkSamples)) {
        return false;
    }

    SkASSERT(VK_IMAGE_TILING_OPTIMAL == imageDesc.fImageTiling ||
             VK_SAMPLE_COUNT_1_BIT == vkSamples);

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        nullptr,                                     // pNext
        0,                                           // VkImageCreateFlags
        imageDesc.fImageType,                        // VkImageType
        imageDesc.fFormat,                           // VkFormat
        { imageDesc.fWidth, imageDesc.fHeight, 1 },  // VkExtent3D
        imageDesc.fLevels,                           // mipLevels
        1,                                           // arrayLayers
        vkSamples,                                   // samples
        imageDesc.fImageTiling,                      // VkImageTiling
        imageDesc.fUsageFlags,                       // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyIndexCount
        0,                                           // pQueueFamilyIndices
        initialLayout                                // initialLayout
    };

    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateImage(gpu->device(), &imageCreateInfo, nullptr,
                                                        &image));

    if (!GrVkMemory::AllocAndBindImageMemory(gpu, image, isLinear, &alloc)) {
        VK_CALL(gpu, DestroyImage(gpu->device(), image, nullptr));
        return false;
    }

    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = imageDesc.fImageTiling;
    info->fImageLayout = initialLayout;
    info->fFormat = imageDesc.fFormat;
    info->fLevelCount = imageDesc.fLevels;
    info->fCurrentQueueFamily = VK_QUEUE_FAMILY_IGNORED;
    return true;
}

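// Counterpart to InitImageInfo: destroys the VkImage and frees the memory bound to it.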
void GrVkImage::DestroyImageInfo(const GrVkGpu* gpu, GrVkImageInfo* info) {
    VK_CALL(gpu, DestroyImage(gpu->device(), info->fImage, nullptr));
    bool isLinear = VK_IMAGE_TILING_LINEAR == info->fImageTiling;
    GrVkMemory::FreeImageMemory(gpu, isLinear, info->fAlloc);
}

GrVkImage::~GrVkImage() {
    // should have been released or abandoned first
    SkASSERT(!fResource);
}

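// Prepares the image for presentation: moves it to PRESENT_SRC_KHR when swapchains are supported
// (and the image didn't come from an external/foreign queue family), and releases ownership back
// to the queue family the image was created with.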
void GrVkImage::prepareForPresent(GrVkGpu* gpu) {
    VkImageLayout layout = this->currentLayout();
    if (fInitialQueueFamily != VK_QUEUE_FAMILY_EXTERNAL &&
        fInitialQueueFamily != VK_QUEUE_FAMILY_FOREIGN_EXT) {
        if (gpu->vkCaps().supportsSwapchain()) {
            layout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
        }
    }
    this->setImageLayout(gpu, layout, 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false, true);
}

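// Hands the image back to an external client: keeps the current layout but releases ownership to
// the image's original queue family.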
void GrVkImage::prepareForExternal(GrVkGpu* gpu) {
    this->setImageLayout(gpu, this->currentLayout(), 0, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, false,
                         true);
}

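// Drops our reference to the underlying resource, first returning queue family ownership if we
// had transferred the image to our queue.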
void GrVkImage::releaseImage(GrVkGpu* gpu) {
    if (fInfo.fCurrentQueueFamily != fInitialQueueFamily) {
        // The Vulkan spec is vague on what to put for the dstStageMask here. The spec for image
        // memory barriers says the dstStageMask must not be zero. However, when the spec talks
        // about queue family transfers it says the dstStageMask is ignored and should be set to
        // zero. Assuming it really is ignored, we set it to VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT here
        // since that makes the Vulkan validation layers happy.
        this->setImageLayout(gpu, this->currentLayout(), 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                             false, true);
    }
    if (fResource) {
        fResource->removeOwningTexture();
        fResource->unref(gpu);
        fResource = nullptr;
    }
}

void GrVkImage::abandonImage() {
    if (fResource) {
        fResource->removeOwningTexture();
        fResource->unrefAndAbandon();
        fResource = nullptr;
    }
}

void GrVkImage::setResourceRelease(sk_sp<GrRefCntedCallback> releaseHelper) {
    SkASSERT(fResource);
    // Forward the release proc on to GrVkImage::Resource
    fResource->setRelease(std::move(releaseHelper));
}

void GrVkImage::Resource::freeGPUData(GrVkGpu* gpu) const {
    this->invokeReleaseProc();
    VK_CALL(gpu, DestroyImage(gpu->device(), fImage, nullptr));
    bool isLinear = (VK_IMAGE_TILING_LINEAR == fImageTiling);
    GrVkMemory::FreeImageMemory(gpu, isLinear, fAlloc);
}

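// Idle procs fire once the resource is no longer referenced by any command buffer (see
// notifyRemovedFromCommandBuffer below). The owning texture is tracked so that procs can also be
// deferred until the texture is idle in the resource cache.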
void GrVkImage::Resource::addIdleProc(GrVkTexture* owningTexture,
                                      sk_sp<GrRefCntedCallback> idleProc) const {
    SkASSERT(!fOwningTexture || fOwningTexture == owningTexture);
    fOwningTexture = owningTexture;
    fIdleProcs.push_back(std::move(idleProc));
}

int GrVkImage::Resource::idleProcCnt() const { return fIdleProcs.count(); }

sk_sp<GrRefCntedCallback> GrVkImage::Resource::idleProc(int i) const { return fIdleProcs[i]; }

void GrVkImage::Resource::resetIdleProcs() const { fIdleProcs.reset(); }

void GrVkImage::Resource::removeOwningTexture() const { fOwningTexture = nullptr; }

void GrVkImage::Resource::notifyAddedToCommandBuffer() const { ++fNumCommandBufferOwners; }

void GrVkImage::Resource::notifyRemovedFromCommandBuffer() const {
    SkASSERT(fNumCommandBufferOwners);
    if (--fNumCommandBufferOwners || !fIdleProcs.count()) {
        return;
    }
    if (fOwningTexture) {
        if (fOwningTexture->resourcePriv().hasRefOrPendingIO()) {
            // Wait for the texture to become idle in the cache to call the procs.
            return;
        }
        fOwningTexture->callIdleProcsOnBehalfOfResource();
    } else {
        fIdleProcs.reset();
    }
}

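// A BorrowedResource does not own the VkImage or its memory, so freeing or abandoning it only
// needs to invoke the release proc.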
void GrVkImage::BorrowedResource::freeGPUData(GrVkGpu* gpu) const {
    this->invokeReleaseProc();
}

void GrVkImage::BorrowedResource::abandonGPUData() const {
    this->invokeReleaseProc();
}

#if GR_TEST_UTILS
void GrVkImage::setCurrentQueueFamilyToGraphicsQueue(GrVkGpu* gpu) {
    fInfo.fCurrentQueueFamily = gpu->queueIndex();
}
#endif