/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work.

#include "include/core/SkTypes.h"

#if defined(SK_VULKAN)

#include "include/gpu/vk/GrVkVulkan.h"

#include "tests/Test.h"

#include "include/core/SkImage.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrTexture.h"
#include "include/gpu/vk/GrVkTypes.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/SkGpuDevice.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkImageLayout.h"
#include "src/gpu/vk/GrVkTexture.h"
#include "src/image/SkImage_Base.h"
#include "src/image/SkSurface_Gpu.h"

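// Verify that Vulkan image layout state is shared across every handle to the same VkImage:
// copies of a GrBackendTexture, the GrBackendTexture returned by a wrapping SkImage, and the
// underlying GrVkTexture itself.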
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkImageLayoutTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();

    GrBackendTexture backendTex = context->createBackendTexture(1, 1,
                                                                 kRGBA_8888_SkColorType,
                                                                 SkColors::kTransparent,
                                                                 GrMipMapped::kNo,
                                                                 GrRenderable::kNo,
                                                                 GrProtected::kNo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    GrVkImageInfo info;
    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    VkImageLayout initLayout = info.fImageLayout;

    // Verify that setting the layout via a copy of a backendTexture is reflected in all the
    // backendTextures.
    GrBackendTexture backendTexCopy = backendTex;
    REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    backendTexCopy.setVkImageLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == info.fImageLayout);

    REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL == info.fImageLayout);

    // Set the layout back since we didn't actually change it.
    backendTex.setVkImageLayout(initLayout);

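    // Wrap the backend texture in an SkImage so we can get at the underlying GrVkTexture.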
    sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(context, backendTex,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           kRGBA_8888_SkColorType,
                                                           kPremul_SkAlphaType, nullptr);
    REPORTER_ASSERT(reporter, wrappedImage.get());

    sk_sp<GrTextureProxy> texProxy = as_IB(wrappedImage)->asTextureProxyRef(context);
    REPORTER_ASSERT(reporter, texProxy.get());
    REPORTER_ASSERT(reporter, texProxy->isInstantiated());
    GrTexture* texture = texProxy->peekTexture();
    REPORTER_ASSERT(reporter, texture);

    // Verify that modifying the layout via the GrVkTexture is reflected in the GrBackendTexture.
    GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
    REPORTER_ASSERT(reporter, initLayout == vkTexture->currentLayout());
    vkTexture->updateImageLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == info.fImageLayout);

    GrBackendTexture backendTexImage = wrappedImage->getBackendTexture(false);
    REPORTER_ASSERT(reporter, backendTexImage.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL == info.fImageLayout);

    // Verify that modifying the layout via the GrBackendTexture is reflected in the GrVkTexture.
    backendTexImage.setVkImageLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
    REPORTER_ASSERT(reporter, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL == vkTexture->currentLayout());

    vkTexture->updateImageLayout(initLayout);

    REPORTER_ASSERT(reporter, backendTex.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    REPORTER_ASSERT(reporter, backendTexCopy.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    REPORTER_ASSERT(reporter, backendTexImage.getVkImageInfo(&info));
    REPORTER_ASSERT(reporter, initLayout == info.fImageLayout);

    // Check that we can do things like assigning the backend texture to an invalid one, assigning
    // an invalid one, assigning a backend texture to itself, etc. Success here is that we don't
    // hit any of our ref counting asserts.
    REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(backendTex, backendTexCopy));

    GrBackendTexture invalidTexture;
    REPORTER_ASSERT(reporter, !invalidTexture.isValid());
    REPORTER_ASSERT(reporter, !GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTexCopy));

    backendTexCopy = invalidTexture;
    REPORTER_ASSERT(reporter, !backendTexCopy.isValid());
    REPORTER_ASSERT(reporter, !GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTexCopy));

    invalidTexture = backendTex;
    REPORTER_ASSERT(reporter, invalidTexture.isValid());
    REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(invalidTexture, backendTex));

    // Self-assignment through a cast so the compiler doesn't warn about assigning a variable to
    // itself.
    invalidTexture = static_cast<decltype(invalidTexture)&>(invalidTexture);
    REPORTER_ASSERT(reporter, invalidTexture.isValid());
    REPORTER_ASSERT(reporter, GrBackendTexture::TestingOnly_Equals(invalidTexture, invalidTexture));

    context->deleteBackendTexture(backendTex);
}

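// Release proc that simply counts its invocations so the tests below can observe exactly when
// Skia releases a wrapped texture.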
static void testing_release_proc(void* ctx) {
    int* count = (int*)ctx;
    *count += 1;
}

// Test to make sure we don't call our release proc on an image until we've transferred it back to
// its original queue family.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkReleaseExternalQueueTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();
    GrGpu* gpu = context->priv().getGpu();
    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(gpu);
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

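    // Run once with the texture staying on the graphics queue and once pretending it was imported
    // from an external queue family.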
    for (bool useExternal : {false, true}) {
        GrBackendTexture backendTex = context->createBackendTexture(1, 1,
                                                                     kRGBA_8888_SkColorType,
                                                                     SkColors::kTransparent,
                                                                     GrMipMapped::kNo,
                                                                     GrRenderable::kNo,
                                                                     GrProtected::kNo);
        sk_sp<SkImage> image;
        int count = 0;
        if (useExternal) {
            // Make a backend texture with an external queue family.
            GrVkImageInfo vkInfo;
            if (!backendTex.getVkImageInfo(&vkInfo)) {
                return;
            }
            vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;

            GrBackendTexture vkExtTex(1, 1, vkInfo);
            REPORTER_ASSERT(reporter, vkExtTex.isValid());
            image = SkImage::MakeFromTexture(context, vkExtTex,
                                             kTopLeft_GrSurfaceOrigin,
                                             kRGBA_8888_SkColorType,
                                             kPremul_SkAlphaType,
                                             nullptr, testing_release_proc,
                                             (void*)&count);
        } else {
            image = SkImage::MakeFromTexture(context, backendTex,
                                             kTopLeft_GrSurfaceOrigin,
                                             kRGBA_8888_SkColorType,
                                             kPremul_SkAlphaType,
                                             nullptr, testing_release_proc,
                                             (void*)&count);
        }

        if (!image) {
            continue;
        }

        REPORTER_ASSERT(reporter, !count);

        GrTexture* texture = image->getTexture();
        REPORTER_ASSERT(reporter, texture);
        GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

        if (useExternal) {
            // Testing helper so we claim that we don't need to transition from our fake external
            // queue first.
            vkTex->setCurrentQueueFamilyToGraphicsQueue(vkGpu);
        }

        image.reset();

        // Resetting the image should only trigger the release proc if we are not using an
        // external queue. When using an external queue, freeing the SkImage and the underlying
        // GrTexture submits a queue transition on the command buffer, so the release is deferred.
        if (useExternal) {
            REPORTER_ASSERT(reporter, !count);
        } else {
            REPORTER_ASSERT(reporter, count == 1);
        }

        gpu->testingOnly_flushGpuAndSync();

        // Now that we have flushed and waited, the release proc should have been triggered.
        REPORTER_ASSERT(reporter, count == 1);

        context->deleteBackendTexture(backendTex);
    }
}

// Test to make sure we transition back to the original queue when prepare-for-external-IO requests
// are included in flush calls.
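// The GrPrepareForExternalIORequests passed to flush below names the SkImages/SkSurfaces that
// should be made ready for use outside of Skia; the test checks that they are returned to their
// original queue family and, for surfaces flagged for present, transitioned to
// VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.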
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkPrepareForExternalIOQueueTransitionTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();

    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(context->priv().getGpu());
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

    for (bool useSurface : {false, true}) {
        for (bool preparePresent : {false, true}) {
            if (!useSurface && preparePresent) {
                // We don't set textures (only surfaces) to present.
                continue;
            }
            GrBackendTexture backendTex = context->createBackendTexture(
                    4, 4, kRGBA_8888_SkColorType,
                    SkColors::kTransparent, GrMipMapped::kNo,
                    useSurface ? GrRenderable::kYes : GrRenderable::kNo,
                    GrProtected::kNo);

            // Make a backend texture with an external queue family and general layout.
            GrVkImageInfo vkInfo;
            if (!backendTex.getVkImageInfo(&vkInfo)) {
                return;
            }

            // We can't actually make an external texture in our test. However, we lie and say it
            // is one and then manually swap the queue over to the graphics queue once we wrap it.
            if (preparePresent) {
                // We don't transition to present for images that are headed to external or
                // foreign queues, so in the present case keep the image on the graphics queue.
                vkInfo.fCurrentQueueFamily = vkGpu->queueIndex();
            } else {
                vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
            }

            GrBackendTexture vkExtTex(1, 1, vkInfo);

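            // Wrap the "external" texture either as an SkSurface (renderable case) or as an
            // SkImage.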
            sk_sp<SkImage> image;
            sk_sp<SkSurface> surface;
            GrTexture* texture;
            if (useSurface) {
                surface = SkSurface::MakeFromBackendTexture(context, vkExtTex,
                        kTopLeft_GrSurfaceOrigin, 0, kRGBA_8888_SkColorType, nullptr, nullptr);
                REPORTER_ASSERT(reporter, surface.get());
                if (!surface) {
                    continue;
                }
                SkSurface_Gpu* gpuSurface = static_cast<SkSurface_Gpu*>(surface.get());
                auto* rtc = gpuSurface->getDevice()->accessRenderTargetContext();
                texture = rtc->asTextureProxy()->peekTexture();
            } else {
                image = SkImage::MakeFromTexture(context, vkExtTex, kTopLeft_GrSurfaceOrigin,
                        kRGBA_8888_SkColorType, kPremul_SkAlphaType, nullptr, nullptr, nullptr);

                REPORTER_ASSERT(reporter, image.get());
                if (!image) {
                    continue;
                }

                texture = image->getTexture();
            }

            REPORTER_ASSERT(reporter, texture);
            GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

            // Testing helper so we claim that we don't need to transition from our fake external
            // queue first.
            vkTex->setCurrentQueueFamilyToGraphicsQueue(vkGpu);

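            // The wrap plus the testing helper above should have left the image on the graphics
            // queue; record the current layout so we can verify it is preserved when we don't
            // request a present transition.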
            GrBackendTexture newBackendTexture;
            if (useSurface) {
                newBackendTexture = surface->getBackendTexture(
                        SkSurface::kFlushRead_TextureHandleAccess);
            } else {
                newBackendTexture = image->getBackendTexture(false);
            }
            GrVkImageInfo newVkInfo;
            REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
            REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());
            VkImageLayout oldLayout = newVkInfo.fImageLayout;

            GrPrepareForExternalIORequests externalRequests;
            SkImage* imagePtr;
            SkSurface* surfacePtr;
            if (useSurface) {
                externalRequests.fNumSurfaces = 1;
                surfacePtr = surface.get();
                externalRequests.fSurfaces = &surfacePtr;
                externalRequests.fPrepareSurfaceForPresent = &preparePresent;
            } else {
                externalRequests.fNumImages = 1;
                imagePtr = image.get();
                externalRequests.fImages = &imagePtr;
            }
            context->flush(GrFlushInfo(), externalRequests);

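            // After the flush the image should have been returned to its original queue family
            // (or transitioned to the present layout if that was requested).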
            if (useSurface) {
                newBackendTexture = surface->getBackendTexture(
                        SkSurface::kFlushRead_TextureHandleAccess);
            } else {
                newBackendTexture = image->getBackendTexture(false);
            }
            REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
            if (preparePresent) {
                REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());
                REPORTER_ASSERT(reporter,
                                newVkInfo.fImageLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
            } else {
                REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == VK_QUEUE_FAMILY_EXTERNAL);
                REPORTER_ASSERT(reporter, newVkInfo.fImageLayout == oldLayout);
            }

            GrFlushInfo flushInfo;
            flushInfo.fFlags = kSyncCpu_GrFlushFlag;
            context->flush(flushInfo);
            context->deleteBackendTexture(backendTex);
        }
    }
}

// This test is disabled because it executes illegal Vulkan calls which cause the validation layers
// to fail and make us assert. Once fixed to use a valid Vulkan call sequence it should be
// re-enabled, see skbug.com/8936.
#if 0
// Test to make sure we transition from the EXTERNAL queue even when no layout transition is needed.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkTransitionExternalQueueTest, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();
    GrGpu* gpu = context->priv().getGpu();
    GrVkGpu* vkGpu = static_cast<GrVkGpu*>(gpu);
    if (!vkGpu->vkCaps().supportsExternalMemory()) {
        return;
    }

    GrBackendTexture backendTex = context->createBackendTexture(
            1, 1, kRGBA_8888_SkColorType,
            SkColors::kTransparent, GrMipMapped::kNo, GrRenderable::kNo);
    sk_sp<SkImage> image;
    // Make a backend texture with an external queue family and general layout.
    GrVkImageInfo vkInfo;
    if (!backendTex.getVkImageInfo(&vkInfo)) {
        return;
    }
    vkInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    // Use a read-only layout as these are the ones where we can otherwise skip a transition.
    vkInfo.fImageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

    GrBackendTexture vkExtTex(1, 1, vkInfo);
    REPORTER_ASSERT(reporter, vkExtTex.isValid());
    image = SkImage::MakeFromTexture(context, vkExtTex, kTopLeft_GrSurfaceOrigin,
                                     kRGBA_8888_SkColorType, kPremul_SkAlphaType, nullptr, nullptr,
                                     nullptr);

    if (!image) {
        return;
    }

    GrTexture* texture = image->getTexture();
    REPORTER_ASSERT(reporter, texture);
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);

    // Change our backend texture to the internal queue, with the same layout. This should force a
    // queue transition even though the layouts match.
    vkTex->setImageLayout(vkGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 0,
                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, false, false);

    // Get our image info again and make sure we transitioned queues.
    GrBackendTexture newBackendTexture = image->getBackendTexture(true);
    GrVkImageInfo newVkInfo;
    REPORTER_ASSERT(reporter, newBackendTexture.getVkImageInfo(&newVkInfo));
    REPORTER_ASSERT(reporter, newVkInfo.fCurrentQueueFamily == vkGpu->queueIndex());

    image.reset();
    gpu->testingOnly_flushGpuAndSync();
    context->deleteBackendTexture(backendTex);
}
#endif

#endif