/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work.
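//
// The tests below verify that Skia's bookkeeping for a wrapped Vulkan image stays consistent:
// VkImageLayout changes made through any GrBackendTexture copy, a wrapped SkImage/SkSurface, or
// the underlying GrVkTexture are visible through all of them, and images that claim an external
// or foreign queue family are transferred back to the graphics queue before Skia releases them
// or hands them back for external use.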

#include "include/core/SkTypes.h"

#if defined(SK_VULKAN)

#include "include/gpu/vk/GrVkVulkan.h"

#include "tests/Test.h"

#include "include/core/SkImage.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrTexture.h"
#include "include/gpu/vk/GrVkTypes.h"
#include "include/private/GrTextureProxy.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/SkGpuDevice.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkImageLayout.h"
#include "src/gpu/vk/GrVkTexture.h"
#include "src/image/SkImage_Base.h"
#include "src/image/SkSurface_Gpu.h"

DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkImageLayoutTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();

    GrBackendTexture backendTex = context->createBackendTexture(1, 1,
                                                                 kRGBA_8888_SkColorType,
                                                                 SkColors::kTransparent,
                                                                 GrMipMapped::kNo,
                                                                 GrRenderable::kNo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

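    // Capture the layout the newly created texture was left in so it can be restored later in
    // the test.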
    GrVkImageInfo info;
    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    VkImageLayout initLayout = info.fImageLayout;

    // Verify that setting the layout via a copy of a backendTexture is reflected in all the
    // backendTextures.
    GrBackendTexture backendTexCopy = backendTex;
    REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    backendTexCopy.setVkImageLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == info.fImageLayout);

    REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == info.fImageLayout);
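    // Both the original handle and its copy report the new layout; copies share the layout state
    // rather than snapshotting it.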

    // Set the layout back since we didn't actually change it.
    backendTex.setVkImageLayout(initLayout);

    sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(context, backendTex,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           kRGBA_8888_SkColorType,
                                                           kPremul_SkAlphaType, nullptr);
    REPORTER_ASSERT(reporter, wrappedImage.get());

    sk_sp<GrTextureProxy> texProxy = as_IB(wrappedImage)->asTextureProxyRef(context);
    REPORTER_ASSERT(reporter, texProxy.get());
    REPORTER_ASSERT(reporter, texProxy->isInstantiated());
    GrTexture* texture = texProxy->peekTexture();
    REPORTER_ASSERT(reporter, texture);

    // Verify that modifying the layout via the GrVkTexture is reflected in the GrBackendTexture.
    GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
    REPORTER_ASSERT(reporter, initLayout == vkTexture->currentLayout());
    vkTexture->updateImageLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == info.fImageLayout);

    GrBackendTexture backendTexImage = wrappedImage->getBackendTexture(false);
    REPORTER_ASSERT(reporter, backendTexImage.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == info.fImageLayout);

    // Verify that modifying the layout via the GrBackendTexture is reflected in the GrVkTexture.
    backendTexImage.setVkImageLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == vkTexture->currentLayout());
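    // At this point layout updates have been observed in both directions: from the GrVkTexture to
    // the backend texture handles and back.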

    vkTexture->updateImageLayout(initLayout);

    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    REPORTER_ASSERT(reporter, backendTexImage.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    // Check that we can do things like assign a backend texture to an invalid one, assign an
    // invalid one, assign a backend texture to itself, etc. Success here is that we don't hit any
    // of our ref counting asserts.
    REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(backendTex, backendTexCopy));

    GrBackendTexture invalidTexture;
    REPORTER_ASSERT(reporter, !invalidTexture.isValid());
    REPORTER_ASSERT(reporter, !GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTexCopy));

    backendTexCopy = invalidTexture;
    REPORTER_ASSERT(reporter, !backendTexCopy.isValid());
    REPORTER_ASSERT(reporter, !GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTexCopy));

    invalidTexture = backendTex;
    REPORTER_ASSERT(reporter, invalidTexture.isValid());
    REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTex));

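    // Self-assignment; the cast is only there to avoid compiler warnings about assigning a
    // variable to itself.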
    invalidTexture = static_cast<decltype(invalidTexture)&>(invalidTexture);
    REPORTER_ASSERT(reporter, invalidTexture.isValid());
    REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(invalidTexture, invalidTexture));

    context->deleteBackendTexture(backendTex);
}

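// Release proc handed to SkImage::MakeFromTexture in the tests below; it bumps a counter so the
// tests can tell exactly when Skia has released the wrapped texture.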
static void testing_release_proc(void* ctx) {
    int* count = (int*)ctx;
    *count += 1;
}

// Test to make sure we don't call our release proc on an image until we've transferred it back to
// its original queue family.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkReleaseExternalQueueTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();
    GrGpu* gpu = context->priv().getGpu();
    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(gpu);
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

    for (bool useExternal : {false, true}) {
        GrBackendTexture backendTex = context->createBackendTexture(1, 1,
                                                                     kRGBA_8888_SkColorType,
                                                                     SkColors::kTransparent,
                                                                     GrMipMapped::kNo,
                                                                     GrRenderable::kNo);
        sk_sp<SkImage> image;
        int count = 0;
        if (useExternal) {
            // Make a backend texture with an external queue family.
            GrVkImageInfo vkInfo;
            if (!backendTex.getVkImageInfo(&vkInfo)) {
                return;
            }
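            // Claiming VK_QUEUE_FAMILY_EXTERNAL means Skia has to transfer ownership back to its
            // graphics queue before the texture can be released, which is what this test checks.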
            vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;

            GrBackendTexture vkExtTex(1, 1, vkInfo);
            REPORTER_ASSERT(reporter, vkExtTex.isValid());
            image = SkImage::MakeFromTexture(context, vkExtTex,
                                             kTopLeft_GrSurfaceOrigin,
                                             kRGBA_8888_SkColorType,
                                             kPremul_SkAlphaType,
                                             nullptr, testing_release_proc,
                                             (void*)&count);

        } else {
            image = SkImage::MakeFromTexture(context, backendTex,
                                             kTopLeft_GrSurfaceOrigin,
                                             kRGBA_8888_SkColorType,
                                             kPremul_SkAlphaType,
                                             nullptr, testing_release_proc,
                                             (void*)&count);
        }

        if (!image) {
            continue;
        }

        REPORTER_ASSERT(reporter, !count);

        GrTexture* texture = image->getTexture();
        REPORTER_ASSERT(reporter, texture);
        GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

        if (useExternal) {
            // Testing helper so we claim that we don't need to transition from our fake external
            // queue first.
            vkTex->setCurrentQueueFamilyToGraphicsQueue(vkGpu);
        }

        image.reset();

        // Resetting the image should only trigger the release proc if we are not using an
        // external queue. When using an external queue, freeing the SkImage and the underlying
        // GrTexture submits a queue transition on the command buffer.
        if (useExternal) {
            REPORTER_ASSERT(reporter, !count);
        } else {
            REPORTER_ASSERT(reporter, count == 1);
        }

        gpu->testingOnly_flushGpuAndSync();

        // Now that we have flushed and waited, the release proc should have been triggered.
        REPORTER_ASSERT(reporter, count == 1);

        context->deleteBackendTexture(backendTex);
    }
}

// Test to make sure we transition to the original queue when prepare-for-external-IO requests are
// included in flush calls.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkPrepareForExternalIOQueueTransitionTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();

    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(context->priv().getGpu());
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

    for (bool useSurface : {false, true}) {
        for (bool preparePresent : {false, true}) {
            if (!useSurface && preparePresent) {
                // We don't set textures to present.
                continue;
            }
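            // The backend texture is created renderable only when it will be wrapped as an
            // SkSurface below.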
            GrBackendTexture backendTex = context->createBackendTexture(
                    4, 4, kRGBA_8888_SkColorType,
                    SkColors::kTransparent, GrMipMapped::kNo,
                    useSurface ? GrRenderable::kYes : GrRenderable::kNo);

            // Make a backend texture with an external queue family and general layout.
            GrVkImageInfo vkInfo;
            if (!backendTex.getVkImageInfo(&vkInfo)) {
                return;
            }

            // We can't actually make an external texture in our test. However, we lie and say it
            // is external, and then manually swap the queue to the graphics queue once we wrap it.
            if (preparePresent) {
                // We don't transition to present for images that are headed to external or
                // foreign queues.
                vkInfo.fCurrentQueueFamily = vkGpu->queueIndex();
            } else {
                vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
            }

            GrBackendTexture vkExtTex(1, 1, vkInfo);

            sk_sp<SkImage> image;
            sk_sp<SkSurface> surface;
            GrTexture* texture;
            if (useSurface) {
                surface = SkSurface::MakeFromBackendTexture(context, vkExtTex,
                        kTopLeft_GrSurfaceOrigin, 0, kRGBA_8888_SkColorType, nullptr, nullptr);
                REPORTER_ASSERT(reporter, surface.get());
                if (!surface) {
                    continue;
                }
                SkSurface_Gpu* gpuSurface = static_cast<SkSurface_Gpu*>(surface.get());
                auto* rtc = gpuSurface->getDevice()->accessRenderTargetContext();
                texture = rtc->asTextureProxy()->peekTexture();
            } else {
                image = SkImage::MakeFromTexture(context, vkExtTex, kTopLeft_GrSurfaceOrigin,
                        kRGBA_8888_SkColorType, kPremul_SkAlphaType, nullptr, nullptr, nullptr);

                REPORTER_ASSERT(reporter, image.get());
                if (!image) {
                    continue;
                }

                texture = image->getTexture();
            }

            REPORTER_ASSERT(reporter, texture);
            GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

            // Testing helper so we claim that we don't need to transition from our fake external
            // queue first.
            vkTex->setCurrentQueueFamilyToGraphicsQueue(vkGpu);

            GrBackendTexture newBackendTexture;
            if (useSurface) {
                newBackendTexture = surface->getBackendTexture(
                        SkSurface::kFlushRead_TextureHandleAccess);
            } else {
                newBackendTexture = image->getBackendTexture(false);
            }
            GrVkImageInfo newVkInfo;
            REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
            REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());
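            // Remember the current layout; for the plain external (non-present) request it should
            // be unchanged by the flush below.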
            VkImageLayout oldLayout = newVkInfo.fImageLayout;

            GrPrepareForExternalIORequests externalRequests;
            SkImage* imagePtr;
            SkSurface* surfacePtr;
            if (useSurface) {
                externalRequests.fNumSurfaces = 1;
                surfacePtr = surface.get();
                externalRequests.fSurfaces = &surfacePtr;
                externalRequests.fPrepareSurfaceForPresent = &preparePresent;
            } else {
                externalRequests.fNumImages = 1;
                imagePtr = image.get();
                externalRequests.fImages = &imagePtr;
            }
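            // Flushing with these requests asks Skia to transition the wrapped object for
            // external use: surfaces flagged for present end up in the present layout, other
            // requests return the image to the external queue family.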
            context->flush(GrFlushInfo(), externalRequests);

            if (useSurface) {
                newBackendTexture = surface->getBackendTexture(
                        SkSurface::kFlushRead_TextureHandleAccess);
            } else {
                newBackendTexture = image->getBackendTexture(false);
            }
            REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
            if (preparePresent) {
                REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());
                REPORTER_ASSERT(reporter,
                                newVkInfo.fImageLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
            } else {
                REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == VK_QUEUE_FAMILY_EXTERNAL);
                REPORTER_ASSERT(reporter, newVkInfo.fImageLayout == oldLayout);
            }

            GrFlushInfo flushInfo;
            flushInfo.fFlags = kSyncCpu_GrFlushFlag;
            context->flush(flushInfo);
            context->deleteBackendTexture(backendTex);
        }
    }
}

// Test to make sure we transition from the EXTERNAL queue even when no layout transition is
// needed.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkTransitionExternalQueueTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();
    GrGpu* gpu = context->priv().getGpu();
    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(gpu);
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

    GrBackendTexture backendTex = context->createBackendTexture(
            1, 1, kRGBA_8888_SkColorType,
            SkColors::kTransparent, GrMipMapped::kNo, GrRenderable::kNo);
    sk_sp<SkImage> image;
    // Make a backend texture with an external queue family and general layout.
    GrVkImageInfo vkInfo;
    if (!backendTex.getVkImageInfo(&vkInfo)) {
        return;
    }
    vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    // Use a read-only layout as these are the ones where we can otherwise skip a transition.
    vkInfo.fImageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

    GrBackendTexture vkExtTex(1, 1, vkInfo);
    REPORTER_ASSERT(reporter, vkExtTex.isValid());
    image = SkImage::MakeFromTexture(context, vkExtTex, kTopLeft_GrSurfaceOrigin,
                                     kRGBA_8888_SkColorType, kPremul_SkAlphaType, nullptr, nullptr,
                                     nullptr);

    if (!image) {
        return;
    }

    GrTexture* texture = image->getTexture();
    REPORTER_ASSERT(reporter, texture);
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

    // Change our backend texture to the internal queue, with the same layout. This should force a
    // queue transition even though the layouts match.
    vkTex->setImageLayout(vkGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 0,
                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, false, false);

    // Get our image info again and make sure we transitioned queues.
    GrBackendTexture newBackendTexture = image->getBackendTexture(true);
    GrVkImageInfo newVkInfo;
    REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
    REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());

    image.reset();
    gpu->testingOnly_flushGpuAndSync();
    context->deleteBackendTexture(backendTex);
}

#endif