blob: ced9f8834e55052692dc6cb94d3a78d7cc35c3ac [file] [log] [blame]
Greg Daniel52e16d92018-04-10 09:34:07 -04001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
// This is a GPU-backend specific test. It relies on static initializers to work
9
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkTypes.h"
Greg Daniel52e16d92018-04-10 09:34:07 -040011
Brian Osmanc7ad40f2018-05-31 14:27:17 -040012#if defined(SK_VULKAN)
Greg Daniel52e16d92018-04-10 09:34:07 -040013
Mike Kleinc0bd9f92019-04-23 12:05:21 -050014#include "include/gpu/vk/GrVkVulkan.h"
Greg Daniel54bfb182018-11-20 17:12:36 -050015
Mike Kleinc0bd9f92019-04-23 12:05:21 -050016#include "tests/Test.h"
Greg Daniel52e16d92018-04-10 09:34:07 -040017
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "include/core/SkImage.h"
19#include "include/gpu/GrBackendSurface.h"
20#include "include/gpu/GrTexture.h"
21#include "include/gpu/vk/GrVkTypes.h"
22#include "include/private/GrTextureProxy.h"
23#include "src/gpu/GrContextPriv.h"
Greg Daniel797efca2019-05-09 14:04:20 -040024#include "src/gpu/GrRenderTargetContext.h"
25#include "src/gpu/SkGpuDevice.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050026#include "src/gpu/vk/GrVkGpu.h"
27#include "src/gpu/vk/GrVkImageLayout.h"
28#include "src/gpu/vk/GrVkTexture.h"
29#include "src/image/SkImage_Base.h"
Greg Daniel797efca2019-05-09 14:04:20 -040030#include "src/image/SkSurface_Gpu.h"
Greg Daniel52e16d92018-04-10 09:34:07 -040031
32DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkImageLayoutTest, reporter, ctxInfo) {
33 GrContext* context = ctxInfo.grContext();
Greg Daniel52e16d92018-04-10 09:34:07 -040034
Robert Phillips4bdd36f2019-06-04 11:03:06 -040035 GrBackendTexture backendTex = context->createBackendTexture(1, 1,
36 kRGBA_8888_SkColorType,
37 SkColors::kTransparent,
38 GrMipMapped::kNo,
39 GrRenderable::kNo);
Greg Daniel52e16d92018-04-10 09:34:07 -040040 REPORTER_ASSERT(reporter, backendTex.isValid());
41
42 GrVkImageInfo info;
43 REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
44 VkImageLayout initLayout = info.fImageLayout;
45
46 // Verify that setting that layout via a copy of a backendTexture is reflected in all the
47 // backendTextures.
48 GrBackendTexture backendTexCopy = backendTex;
49 REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
50 REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);
51
52 backendTexCopy.setVkImageLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
53
54 REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
55 REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == info.fImageLayout);
56
57 REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
58 REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == info.fImageLayout);
59
60 // Setting back the layout since we didn't actually change it
61 backendTex.setVkImageLayout(initLayout);
62
63 sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(context, backendTex,
64 kTopLeft_GrSurfaceOrigin,
65 kRGBA_8888_SkColorType,
66 kPremul_SkAlphaType, nullptr);
67 REPORTER_ASSERT(reporter, wrappedImage.get());
68
Robert Phillips6603a172019-03-05 12:35:44 -050069 sk_sp<GrTextureProxy> texProxy = as_IB(wrappedImage)->asTextureProxyRef(context);
Greg Daniel52e16d92018-04-10 09:34:07 -040070 REPORTER_ASSERT(reporter, texProxy.get());
Brian Salomonfd98c2c2018-07-31 17:25:29 -040071 REPORTER_ASSERT(reporter, texProxy->isInstantiated());
72 GrTexture* texture = texProxy->peekTexture();
Greg Daniel52e16d92018-04-10 09:34:07 -040073 REPORTER_ASSERT(reporter, texture);
74
75 // Verify that modifying the layout via the GrVkTexture is reflected in the GrBackendTexture
76 GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
77 REPORTER_ASSERT(reporter, initLayout == vkTexture->currentLayout());
78 vkTexture->updateImageLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
79
80 REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
81 REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == info.fImageLayout);
82
83 GrBackendTexture backendTexImage = wrappedImage->getBackendTexture(false);
84 REPORTER_ASSERT(reporter, backendTexImage.getVkImageInfo(&info));
85 REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == info.fImageLayout);
86
87 // Verify that modifying the layout via the GrBackendTexutre is reflected in the GrVkTexture
88 backendTexImage.setVkImageLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
89 REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == vkTexture->currentLayout());
90
Greg Daniel52e16d92018-04-10 09:34:07 -040091 vkTexture->updateImageLayout(initLayout);
92
93 REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
94 REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);
95
96 REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
97 REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);
98
99 REPORTER_ASSERT(reporter, backendTexImage.getVkImageInfo(&info));
100 REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);
101
102 // Check that we can do things like assigning the backend texture to invalid one, assign an
103 // invalid one, assin a backend texture to inself etc. Success here is that we don't hit any of
104 // our ref counting asserts.
105 REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(backendTex, backendTexCopy));
106
107 GrBackendTexture invalidTexture;
108 REPORTER_ASSERT(reporter, !invalidTexture.isValid());
109 REPORTER_ASSERT(reporter, !GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTexCopy));
110
111 backendTexCopy = invalidTexture;
112 REPORTER_ASSERT(reporter, !backendTexCopy.isValid());
113 REPORTER_ASSERT(reporter, !GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTexCopy));
114
115 invalidTexture = backendTex;
116 REPORTER_ASSERT(reporter, invalidTexture.isValid());
117 REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTex));
118
Ben Wagnerff134f22018-04-24 16:29:16 -0400119 invalidTexture = static_cast<decltype(invalidTexture)&>(invalidTexture);
Greg Daniel52e16d92018-04-10 09:34:07 -0400120 REPORTER_ASSERT(reporter, invalidTexture.isValid());
121 REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(invalidTexture, invalidTexture));
122
Robert Phillips5c7a25b2019-05-20 08:38:07 -0400123 context->deleteBackendTexture(backendTex);
Greg Daniel52e16d92018-04-10 09:34:07 -0400124}
125
// Release proc handed to SkImage::MakeFromTexture below: the context is a pointer to an int
// counter, which is bumped once per invocation so tests can observe when the proc fires.
static void testing_release_proc(void* ctx) {
    auto* releaseCount = static_cast<int*>(ctx);
    ++(*releaseCount);
}
130
// Test to make sure we don't call our release proc on an image until we've transferred it back to
// its original queue family.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkReleaseExternalQueueTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();
    GrGpu* gpu = context->priv().getGpu();
    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(gpu);
    // The external-queue half of this test needs external memory support; skip entirely if absent.
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

    // Run once with a normal texture (release fires on image reset) and once with a texture we
    // pretend came from an external queue (release must wait for the queue transfer to execute).
    for (bool useExternal : {false, true}) {
        GrBackendTexture backendTex = context->createBackendTexture(1, 1,
                                                                    kRGBA_8888_SkColorType,
                                                                    SkColors::kTransparent,
                                                                    GrMipMapped::kNo,
                                                                    GrRenderable::kNo);
        sk_sp<SkImage> image;
        int count = 0;  // incremented by testing_release_proc
        if (useExternal) {
            // Make a backend texture with an external queue family.
            GrVkImageInfo vkInfo;
            if (!backendTex.getVkImageInfo(&vkInfo)) {
                return;
            }
            vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;

            GrBackendTexture vkExtTex(1, 1, vkInfo);
            REPORTER_ASSERT(reporter, vkExtTex.isValid());
            image = SkImage::MakeFromTexture(context, vkExtTex,
                                             kTopLeft_GrSurfaceOrigin,
                                             kRGBA_8888_SkColorType,
                                             kPremul_SkAlphaType,
                                             nullptr, testing_release_proc,
                                             (void*)&count);

        } else {
            image = SkImage::MakeFromTexture(context, backendTex,
                                             kTopLeft_GrSurfaceOrigin,
                                             kRGBA_8888_SkColorType,
                                             kPremul_SkAlphaType,
                                             nullptr, testing_release_proc,
                                             (void*)&count);
        }

        if (!image) {
            continue;
        }

        // Merely wrapping the texture must not trigger the release proc.
        REPORTER_ASSERT(reporter, !count);

        GrTexture* texture = image->getTexture();
        REPORTER_ASSERT(reporter, texture);
        GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

        if (useExternal) {
            // Testing helper so we claim that we don't need to transition from our fake external
            // queue first.
            vkTex->setCurrentQueueFamilyToGraphicsQueue(vkGpu);
        }

        image.reset();

        // Resetting the image should only trigger the release proc if we are not using an external
        // queue. When using an external queue when we free the SkImage and the underlying
        // GrTexture, we submit a queue transition on the command buffer.
        if (useExternal) {
            REPORTER_ASSERT(reporter, !count);
        } else {
            REPORTER_ASSERT(reporter, count == 1);
        }

        gpu->testingOnly_flushGpuAndSync();

        // Now that we flushed and waited, the release proc should have been triggered in both
        // cases (the pending queue transition has executed).
        REPORTER_ASSERT(reporter, count == 1);

        context->deleteBackendTexture(backendTex);
    }
}
210
Greg Daniel797efca2019-05-09 14:04:20 -0400211// Test to make sure we transition to the original queue when requests for prepareforexternalio are
212// in flush calls
213DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkPrepareForExternalIOQueueTransitionTest, reporter, ctxInfo) {
214 GrContext* context = ctxInfo.grContext();
Robert Phillips9b16f812019-05-17 10:01:21 -0400215
216 GrVkGpu* vkGpu = static_cast<GrVkGpu*>(context->priv().getGpu());
Robert Phillips9dbcdcc2019-05-13 10:40:06 -0400217 if (!vkGpu->vkCaps().supportsExternalMemory()) {
Greg Daniel797efca2019-05-09 14:04:20 -0400218 return;
219 }
220
221 for (bool useSurface : {false, true}) {
222 for (bool preparePresent : {false, true}) {
223 if (!useSurface && preparePresent) {
224 // We don't set textures to present
225 continue;
226 }
Robert Phillips4bdd36f2019-06-04 11:03:06 -0400227 GrBackendTexture backendTex = context->createBackendTexture(
Robert Phillips80626792019-06-04 07:16:10 -0400228 4, 4, kRGBA_8888_SkColorType,
229 SkColors::kTransparent, GrMipMapped::kNo,
Robert Phillips9dbcdcc2019-05-13 10:40:06 -0400230 useSurface ? GrRenderable::kYes : GrRenderable::kNo);
Greg Daniel797efca2019-05-09 14:04:20 -0400231
232 // Make a backend texture with an external queue family and general layout.
233 GrVkImageInfo vkInfo;
234 if (!backendTex.getVkImageInfo(&vkInfo)) {
235 return;
236 }
237
238 // We can't actually make an external texture in our test. However, we lie and say it is
239 // and then will manually go and swap the queue to the graphics queue once we wrap it.
240 if (preparePresent) {
241 // We don't transition to present to things that are going to external for foreign
242 // queues.
Robert Phillips9dbcdcc2019-05-13 10:40:06 -0400243 vkInfo.fCurrentQueueFamily = vkGpu->queueIndex();
Greg Daniel797efca2019-05-09 14:04:20 -0400244 } else {
245 vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
246 }
247
248 GrBackendTexture vkExtTex(1, 1, vkInfo);
249
250 sk_sp<SkImage> image;
251 sk_sp<SkSurface> surface;
252 GrTexture* texture;
253 if (useSurface) {
254 surface = SkSurface::MakeFromBackendTexture(context, vkExtTex,
255 kTopLeft_GrSurfaceOrigin, 0, kRGBA_8888_SkColorType, nullptr, nullptr);
256 REPORTER_ASSERT(reporter, surface.get());
257 if (!surface) {
258 continue;
259 }
260 SkSurface_Gpu* gpuSurface = static_cast<SkSurface_Gpu*>(surface.get());
261 auto* rtc = gpuSurface->getDevice()->accessRenderTargetContext();
262 texture = rtc->asTextureProxy()->peekTexture();
263 } else {
264 image = SkImage::MakeFromTexture(context, vkExtTex, kTopLeft_GrSurfaceOrigin,
265 kRGBA_8888_SkColorType, kPremul_SkAlphaType, nullptr, nullptr, nullptr);
266
267 REPORTER_ASSERT(reporter, image.get());
268 if (!image) {
269 continue;
270 }
271
272 texture = image->getTexture();
273 }
274
275 REPORTER_ASSERT(reporter, texture);
276 GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);
277
278 // Testing helper so we claim that we don't need to transition from our fake external
279 // queue first.
Robert Phillips9dbcdcc2019-05-13 10:40:06 -0400280 vkTex->setCurrentQueueFamilyToGraphicsQueue(vkGpu);
Greg Daniel797efca2019-05-09 14:04:20 -0400281
282 GrBackendTexture newBackendTexture;
283 if (useSurface) {
284 newBackendTexture = surface->getBackendTexture(
285 SkSurface::kFlushRead_TextureHandleAccess);
286 } else {
287 newBackendTexture = image->getBackendTexture(false);
288 }
289 GrVkImageInfo newVkInfo;
290 REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
Robert Phillips9dbcdcc2019-05-13 10:40:06 -0400291 REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());
Greg Daniel797efca2019-05-09 14:04:20 -0400292 VkImageLayout oldLayout = newVkInfo.fImageLayout;
293
294 GrPrepareForExternalIORequests externalRequests;
295 SkImage* imagePtr;
296 SkSurface* surfacePtr;
297 if (useSurface) {
298 externalRequests.fNumSurfaces = 1;
299 surfacePtr = surface.get();
300 externalRequests.fSurfaces = &surfacePtr;
301 externalRequests.fPrepareSurfaceForPresent = &preparePresent;
302 } else {
303 externalRequests.fNumImages = 1;
304 imagePtr = image.get();
305 externalRequests.fImages = &imagePtr;
306
307 }
308 context->flush(GrFlushInfo(), externalRequests);
309
310 if (useSurface) {
311 newBackendTexture = surface->getBackendTexture(
312 SkSurface::kFlushRead_TextureHandleAccess);
313 } else {
314 newBackendTexture = image->getBackendTexture(false);
315 }
316 REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
317 if (preparePresent) {
Robert Phillips9dbcdcc2019-05-13 10:40:06 -0400318 REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());
Greg Daniel797efca2019-05-09 14:04:20 -0400319 REPORTER_ASSERT(reporter,
320 newVkInfo.fImageLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
321 } else {
322 REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == VK_QUEUE_FAMILY_EXTERNAL);
323 REPORTER_ASSERT(reporter, newVkInfo.fImageLayout == oldLayout);
324 }
325
326 GrFlushInfo flushInfo;
327 flushInfo.fFlags = kSyncCpu_GrFlushFlag;
328 context->flush(flushInfo);
Robert Phillips5c7a25b2019-05-20 08:38:07 -0400329 context->deleteBackendTexture(backendTex);
Greg Daniel797efca2019-05-09 14:04:20 -0400330 }
331 }
332}
333
334
// Test to make sure we transition from the EXTERNAL queue even when no layout transition is needed.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkTransitionExternalQueueTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();
    GrGpu* gpu = context->priv().getGpu();
    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(gpu);
    // Requires external memory support to fake an externally-owned texture.
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

    GrBackendTexture backendTex = context->createBackendTexture(
            1, 1, kRGBA_8888_SkColorType,
            SkColors::kTransparent, GrMipMapped::kNo, GrRenderable::kNo);
    sk_sp<SkImage> image;
    // Make a backend texture with an external queue family and general layout.
    GrVkImageInfo vkInfo;
    if (!backendTex.getVkImageInfo(&vkInfo)) {
        return;
    }
    vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    // Use a read-only layout as these are the ones where we can otherwise skip a transition.
    vkInfo.fImageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

    GrBackendTexture vkExtTex(1, 1, vkInfo);
    REPORTER_ASSERT(reporter, vkExtTex.isValid());
    image = SkImage::MakeFromTexture(context, vkExtTex, kTopLeft_GrSurfaceOrigin,
                                     kRGBA_8888_SkColorType, kPremul_SkAlphaType, nullptr, nullptr,
                                     nullptr);

    if (!image) {
        return;
    }

    GrTexture* texture = image->getTexture();
    REPORTER_ASSERT(reporter, texture);
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

    // Change our backend texture to the internal queue, with the same layout. This should force a
    // queue transition even though the layouts match.
    vkTex->setImageLayout(vkGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 0,
                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, false, false);

    // Get our image info again and make sure we transitioned queues.
    GrBackendTexture newBackendTexture = image->getBackendTexture(true);
    GrVkImageInfo newVkInfo;
    REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
    REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());

    // Drain all pending GPU work before deleting the texture.
    image.reset();
    gpu->testingOnly_flushGpuAndSync();
    context->deleteBackendTexture(backendTex);
}
386
Greg Daniel52e16d92018-04-10 09:34:07 -0400387#endif