/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work.

#include "include/core/SkTypes.h"

#if SK_SUPPORT_GPU && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)

#include "include/core/SkBitmap.h"
#include "include/core/SkCanvas.h"
#include "include/core/SkImage.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrBackendSemaphore.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/vk/GrVkBackendContext.h"
#include "include/gpu/vk/GrVkExtensions.h"
#include "src/core/SkAutoMalloc.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/SkGr.h"
#include "src/gpu/gl/GrGLDefines.h"
#include "src/gpu/gl/GrGLUtil.h"
#include "tests/Test.h"
#include "tools/gpu/GrContextFactory.h"
#include "tools/gpu/vk/VkTestUtils.h"

#include <android/hardware_buffer.h>
#include <cinttypes>

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

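// This test exercises sharing an AHardwareBuffer between backends: a source (CPU upload,
// GL/EGL, or Vulkan) fills the buffer with a known pattern, then a destination backend
// (GL/EGL or Vulkan) imports the same buffer, draws it into a surface, and verifies the
// readback. Optionally the two sides are ordered with a shared native fence FD instead of
// a client-side sync.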
static const int DEV_W = 16, DEV_H = 16;

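// Backend-agnostic interface implemented by the EGL and Vulkan helpers below. Each helper
// owns its own context, can import an AHardwareBuffer for reading or writing, and can
// export/import a sync fence FD (kept in fFdHandle) for cross-backend synchronization.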
class BaseTestHelper {
public:
    virtual ~BaseTestHelper() {}

    virtual bool init(skiatest::Reporter* reporter) = 0;

    virtual void cleanup() = 0;
    virtual void releaseImage() = 0;

    virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                       AHardwareBuffer* buffer) = 0;
    virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) = 0;

    virtual void doClientSync() = 0;
    virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
    virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                          sk_sp<SkSurface>) = 0;

    virtual void makeCurrent() = 0;

    virtual GrDirectContext* directContext() = 0;

    int getFdHandle() { return fFdHandle; }

protected:
    BaseTestHelper() {}

    int fFdHandle = 0;
};

#ifdef SK_GL
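// GL/EGL path: the AHardwareBuffer is wrapped as an EGLImage, bound to a GL texture, and
// then exposed to Skia as an SkImage or SkSurface. Fence sharing goes through
// EGL_ANDROID_native_fence_sync.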
class EGLTestHelper : public BaseTestHelper {
public:
    EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}

    ~EGLTestHelper() override {}

    void releaseImage() override {
        this->makeCurrent();
        if (!fGLCtx) {
            return;
        }
        if (EGL_NO_IMAGE_KHR != fImage) {
            fGLCtx->destroyEGLImage(fImage);
            fImage = EGL_NO_IMAGE_KHR;
        }
        if (fTexID) {
            GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
            fTexID = 0;
        }
    }

    void cleanup() override {
        this->releaseImage();
    }

    bool init(skiatest::Reporter* reporter) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;
    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void doClientSync() override;
    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    void makeCurrent() override { fGLCtx->makeCurrent(); }

    GrDirectContext* directContext() override { return fDirectContext; }

private:
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);

    typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
    typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
                                                 const EGLint*);
    typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
    EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
    EGLCreateImageKHRProc fEGLCreateImageKHR;
    EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;

    PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
    PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
    PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
    PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
    PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;

    EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
    GrGLuint fTexID = 0;

    sk_gpu_test::GrContextFactory fFactory;
    sk_gpu_test::ContextInfo fGLESContextInfo;

    sk_gpu_test::GLTestContext* fGLCtx = nullptr;
    GrDirectContext* fDirectContext = nullptr;
};

bool EGLTestHelper::init(skiatest::Reporter* reporter) {
    fGLESContextInfo = fFactory.getContextInfo(sk_gpu_test::GrContextFactory::kGLES_ContextType);
    fDirectContext = fGLESContextInfo.directContext();
    fGLCtx = fGLESContextInfo.glContext();
    if (!fDirectContext || !fGLCtx) {
        return false;
    }

    if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
        return false;
    }

    // Confirm we have EGL and the needed extensions.
    if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
        !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_native_fence_sync")) {
        return false;
    }

    fEGLGetNativeClientBufferANDROID =
        (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
    if (!fEGLGetNativeClientBufferANDROID) {
        ERRORF(reporter, "Failed to get the proc eglGetNativeClientBufferANDROID");
        return false;
    }

    fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
    if (!fEGLCreateImageKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
        return false;
    }

    fEGLImageTargetTexture2DOES =
        (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
    if (!fEGLImageTargetTexture2DOES) {
        ERRORF(reporter, "Failed to get the proc glEGLImageTargetTexture2DOES");
        return false;
    }

    fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
    if (!fEGLCreateSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
        return false;
    }
    fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
    if (!fEGLWaitSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
        return false;
    }
    fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
    if (!fEGLGetSyncAttribKHR) {
        ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
        return false;
    }
    fEGLDupNativeFenceFDANDROID =
        (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
    if (!fEGLDupNativeFenceFDANDROID) {
        ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
        return false;
    }
    fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
    if (!fEGLDestroySyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
        return false;
    }

    return true;
}

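// Imports the AHardwareBuffer into GL: fetch its EGLClientBuffer, create an EGLImage from
// it, and bind that image to a newly generated texture with glEGLImageTargetTexture2DOES.
// The texture id is kept in fTexID so it can be wrapped as a GrBackendTexture.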
bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
    while (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {}

    EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
    EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                            EGL_NONE };
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
                                EGL_NATIVE_BUFFER_ANDROID,
                                eglClientBuffer, eglAttribs);
    if (EGL_NO_IMAGE_KHR == fImage) {
        SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError());
        return false;
    }

    GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
    if (!fTexID) {
        ERRORF(reporter, "Failed to create GL Texture");
        return false;
    }
    GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
    if (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {
        ERRORF(reporter, "Failed to bind GL Texture");
        return false;
    }

    fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
    if (GrGLenum error = fGLCtx->gl()->fFunctions.fGetError(); error != GR_GL_NO_ERROR) {
        ERRORF(reporter, "glEGLImageTargetTexture2DOES failed (%#x)", (int) error);
        return false;
    }

    fDirectContext->resetContext(kTextureBinding_GrGLBackendState);
    return true;
}

sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipmapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkImage> image = SkImage::MakeFromTexture(fDirectContext,
                                                    backendTex,
                                                    kTopLeft_GrSurfaceOrigin,
                                                    kRGBA_8888_SkColorType,
                                                    kPremul_SkAlphaType,
                                                    nullptr);

    if (!image) {
        ERRORF(reporter, "Failed to make wrapped GL SkImage");
        return nullptr;
    }

    return image;
}

sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipmapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fDirectContext,
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface) {
        ERRORF(reporter, "Failed to make wrapped GL SkSurface");
        return nullptr;
    }

    return surface;
}

bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                   sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
        return false;
    }

    surface->flushAndSubmit();
    GR_GL_CALL(fGLCtx->gl(), Flush());
    fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);

    EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }

    return true;
}

bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                             sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLint attr[] = {
        EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
        EGL_NONE
    };
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter,
               "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
        return false;
    }
    EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed call to eglWaitSyncKHR, error: %d\n", result);
        // Don't return false yet; try to delete the sync first.
    }
    result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }
    return true;
}

void EGLTestHelper::doClientSync() {
    this->directContext()->flush();
    this->directContext()->submit(true);
}
#endif  // SK_GL

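// The Vulkan helper drives Vulkan directly (it creates its own VkInstance and VkDevice via
// the test utilities), so every entry point it needs is loaded by hand with these macros.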
#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

#define ACQUIRE_INST_VK_PROC(name)                                                            \
    do {                                                                                      \
        fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name,                        \
                                                           fBackendContext.fInstance,         \
                                                           VK_NULL_HANDLE));                  \
        if (fVk##name == nullptr) {                                                           \
            ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);         \
            return false;                                                                     \
        }                                                                                     \
    } while(false)

#define ACQUIRE_DEVICE_VK_PROC(name)                                                          \
    do {                                                                                      \
        fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE,        \
                                                           fDevice));                         \
        if (fVk##name == nullptr) {                                                           \
            ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);         \
            return false;                                                                     \
        }                                                                                     \
    } while(false)

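// Vulkan path: the AHardwareBuffer is imported as a dedicated memory allocation bound to a
// VkImage, which is then wrapped as an SkImage or SkSurface. Fence sharing goes through
// sync-FD external semaphores (VK_KHR_external_semaphore_fd).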
class VulkanTestHelper : public BaseTestHelper {
public:
    VulkanTestHelper() {}

    ~VulkanTestHelper() override {}

    void releaseImage() override {
        if (VK_NULL_HANDLE == fDevice) {
            return;
        }
        if (fImage != VK_NULL_HANDLE) {
            fVkDestroyImage(fDevice, fImage, nullptr);
            fImage = VK_NULL_HANDLE;
        }

        if (fMemory != VK_NULL_HANDLE) {
            fVkFreeMemory(fDevice, fMemory, nullptr);
            fMemory = VK_NULL_HANDLE;
        }
    }

    void cleanup() override {
        fDirectContext.reset();
        this->releaseImage();
        if (fSignalSemaphore != VK_NULL_HANDLE) {
            fVkDestroySemaphore(fDevice, fSignalSemaphore, nullptr);
            fSignalSemaphore = VK_NULL_HANDLE;
        }
        fBackendContext.fMemoryAllocator.reset();
        if (fDevice != VK_NULL_HANDLE) {
            fVkDeviceWaitIdle(fDevice);
            fVkDestroyDevice(fDevice, nullptr);
            fDevice = VK_NULL_HANDLE;
        }
#ifdef SK_ENABLE_VK_LAYERS
        if (fDebugCallback != VK_NULL_HANDLE) {
            fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
        }
#endif
        if (fBackendContext.fInstance != VK_NULL_HANDLE) {
            fVkDestroyInstance(fBackendContext.fInstance, nullptr);
            fBackendContext.fInstance = VK_NULL_HANDLE;
        }

        delete fExtensions;

        sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
        delete fFeatures;
    }

    bool init(skiatest::Reporter* reporter) override;

    void doClientSync() override {
        if (!fDirectContext) {
            return;
        }

        fDirectContext->submit(true);
    }

    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;

    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void makeCurrent() override {}

    GrDirectContext* directContext() override { return fDirectContext.get(); }

private:
    bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);

    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
                              GrVkImageInfo* outImageInfo);

    bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
    bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);

    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);

    DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);

    DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements2);
    DECLARE_VK_PROC(DestroyImage);

    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(BindImageMemory2);
    DECLARE_VK_PROC(FreeMemory);

    DECLARE_VK_PROC(CreateSemaphore);
    DECLARE_VK_PROC(GetSemaphoreFdKHR);
    DECLARE_VK_PROC(ImportSemaphoreFdKHR);
    DECLARE_VK_PROC(DestroySemaphore);

    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fMemory = VK_NULL_HANDLE;

    GrVkExtensions* fExtensions = nullptr;
    VkPhysicalDeviceFeatures2* fFeatures = nullptr;
    VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
    PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;

    // We hold on to the semaphore so we can delete it once the GPU is done.
    VkSemaphore fSignalSemaphore = VK_NULL_HANDLE;

    VkDevice fDevice = VK_NULL_HANDLE;

    GrVkBackendContext fBackendContext;
    sk_sp<GrDirectContext> fDirectContext;
};

bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
    PFN_vkGetInstanceProcAddr instProc;
    PFN_vkGetDeviceProcAddr devProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
        return false;
    }
    auto getProc = [&instProc, &devProc](const char* proc_name,
                                         VkInstance instance, VkDevice device) {
        if (device != VK_NULL_HANDLE) {
            return devProc(device, proc_name);
        }
        return instProc(instance, proc_name);
    };

    fExtensions = new GrVkExtensions();
    fFeatures = new VkPhysicalDeviceFeatures2;
    memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
    fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    fFeatures->pNext = nullptr;

    fBackendContext.fInstance = VK_NULL_HANDLE;
    fBackendContext.fDevice = VK_NULL_HANDLE;

    if (!sk_gpu_test::CreateVkBackendContext(getProc, &fBackendContext, fExtensions,
                                             fFeatures, &fDebugCallback)) {
        return false;
    }
    fDevice = fBackendContext.fDevice;

    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
                fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
    }

    ACQUIRE_INST_VK_PROC(DestroyInstance);
    ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
    ACQUIRE_INST_VK_PROC(DestroyDevice);

    if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                                   2)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
        // return false;
    }

    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);

    ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    ACQUIRE_DEVICE_VK_PROC(CreateImage);
    ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
    ACQUIRE_DEVICE_VK_PROC(DestroyImage);

    ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
    ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
    ACQUIRE_DEVICE_VK_PROC(FreeMemory);

    ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
    ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);

    fDirectContext = GrDirectContext::MakeVulkan(fBackendContext);
    REPORTER_ASSERT(reporter, fDirectContext.get());
    if (!fDirectContext) {
        return false;
    }

    return this->checkOptimalHardwareBuffer(reporter);
}

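// Verifies up front that the physical device can import an RGBA8, optimally tiled, sampled
// image backed by an AHardwareBuffer at the test dimensions.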
bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
    VkResult err;

    VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
    externalImageFormatInfo.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
    externalImageFormatInfo.pNext = nullptr;
    externalImageFormatInfo.handleType =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

    // We will create the hardware buffer with GPU sampled usage, so these usages should all
    // be valid.
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
    imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
    imageFormatInfo.pNext = &externalImageFormatInfo;
    imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageFormatInfo.type = VK_IMAGE_TYPE_2D;
    imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageFormatInfo.usage = usageFlags;
    imageFormatInfo.flags = 0;

    VkAndroidHardwareBufferUsageANDROID hwbUsage;
    hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
    hwbUsage.pNext = nullptr;

    VkExternalImageFormatProperties externalImgFormatProps;
    externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
    externalImgFormatProps.pNext = &hwbUsage;

    VkImageFormatProperties2 imgFormProps;
    imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
    imgFormProps.pNext = &externalImgFormatProps;

    err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
                                                     &imageFormatInfo, &imgFormProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperties2 failed, err: %d", err);
        return false;
    }

    const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
    REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
    REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);

    const VkExternalMemoryProperties& externalImageFormatProps =
            externalImgFormatProps.externalMemoryProperties;
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));

    REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
                                       hwbUsage.androidHardwareBufferUsage));

    return true;
}

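// Import sequence: query the buffer's format and memory properties, create a VkImage with
// VkExternalMemoryImageCreateInfo chained in, pick a device-local memory type permitted by
// memoryTypeBits, make a dedicated allocation that imports the AHardwareBuffer, and bind it
// to the image. The resulting handles are returned through outImageInfo for wrapping.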
bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
                                            AHardwareBuffer* buffer,
                                            bool forWrite,
                                            GrVkImageInfo* outImageInfo) {
    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "vkGetAndroidHardwareBufferPropertiesANDROID failed, err: %d", err);
        return false;
    }

    REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
    REPORTER_ASSERT(reporter,
            SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
    if (forWrite) {
        REPORTER_ASSERT(reporter,
                SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));
    }

    bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
    const VkExternalFormatANDROID externalFormatInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,              // sType
        nullptr,                                                // pNext
        useExternalFormat ? hwbFormatProps.externalFormat : 0,  // externalFormat
    };

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        &externalFormatInfo,                                                 // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    if (forWrite) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,  // sType
        &externalMemoryImageInfo,             // pNext
        0,                                    // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                     // VkImageType
        hwbFormatProps.format,                // VkFormat
        { DEV_W, DEV_H, 1 },                  // VkExtent3D
        1,                                    // mipLevels
        1,                                    // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                // samples
        VK_IMAGE_TILING_OPTIMAL,              // VkImageTiling
        usageFlags,                           // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode
        0,                                    // queueFamilyCount
        0,                                    // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,            // initialLayout
    };

    err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Create Image failed, err: %d", err);
        return false;
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        ERRORF(reporter, "Failed to find valid heap for imported memory");
        return false;
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = buffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = fImage;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
        &dedicatedAllocInfo,                     // pNext
        hwbProps.allocationSize,                 // allocationSize
        typeIndex,                               // memoryTypeIndex
    };

    err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
        return false;
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = fImage;
    bindImageInfo.memory = fMemory;
    bindImageInfo.memoryOffset = 0;

    err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
        return false;
    }

    GrVkAlloc alloc;
    alloc.fMemory = fMemory;
    alloc.fOffset = 0;
    alloc.fSize = hwbProps.allocationSize;
    alloc.fFlags = 0;

    outImageInfo->fImage = fImage;
    outImageInfo->fAlloc = alloc;
    outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
    outImageInfo->fImageUsageFlags = usageFlags;
    outImageInfo->fLevelCount = 1;
    outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    return true;
}

sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(fDirectContext.get(),
                                                           backendTex,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           kRGBA_8888_SkColorType,
                                                           kPremul_SkAlphaType,
                                                           nullptr);

    if (!wrappedImage.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
        return nullptr;
    }

    return wrappedImage;
}

bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                      sk_sp<SkSurface> surface) {
    surface->flushAndSubmit();
    surface.reset();
    GrBackendSemaphore semaphore;
    if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
        return false;
    }
    GrFlushInfo info;
    info.fNumSemaphores = 1;
    info.fSignalSemaphores = &semaphore;
    GrSemaphoresSubmitted submitted = fDirectContext->flush(info);
    fDirectContext->submit();
    if (GrSemaphoresSubmitted::kNo == submitted) {
        ERRORF(reporter, "Failed call to flush on GrDirectContext");
        return false;
    }
    SkASSERT(semaphore.isInitialized());
    if (!this->exportSemaphore(reporter, semaphore)) {
        return false;
    }
    return true;
}

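// Creates a semaphore that can be exported as a sync FD, after confirming the device
// advertises export and import support for VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT.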
bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
                                                  GrBackendSemaphore* beSemaphore) {
    // Query supported info.
    VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
    exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
    exSemInfo.pNext = nullptr;
    exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkExternalSemaphoreProperties exSemProps;
    exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
    exSemProps.pNext = nullptr;

    fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
                                                    &exSemProps);

    if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.compatibleHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
        !SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
        return false;
    }

    VkExportSemaphoreCreateInfo exportInfo;
    exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
    exportInfo.pNext = nullptr;
    exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = &exportInfo;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
        return false;
    }
    beSemaphore->initVulkan(semaphore);
    return true;
}

bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
                                       const GrBackendSemaphore& beSemaphore) {
    VkSemaphore semaphore = beSemaphore.vkSemaphore();
    if (VK_NULL_HANDLE == semaphore) {
        ERRORF(reporter, "Invalid vulkan handle in export call");
        return false;
    }

    VkSemaphoreGetFdInfoKHR getFdInfo;
    getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
    getFdInfo.pNext = nullptr;
    getFdInfo.semaphore = semaphore;
    getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
        return false;
    }
    fSignalSemaphore = semaphore;
    return true;
}

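// Wraps an incoming sync FD in a temporarily imported semaphore and makes the surface wait
// on it, so this context does not read the buffer before the producing context has finished.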
bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                                sk_sp<SkSurface> surface) {
    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = nullptr;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
        return false;
    }

    VkImportSemaphoreFdInfoKHR importInfo;
    importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
    importInfo.pNext = nullptr;
    importInfo.semaphore = semaphore;
    importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
    importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
    importInfo.fd = fdHandle;

    err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to import semaphore, err: %d", err);
        return false;
    }

    GrBackendSemaphore beSemaphore;
    beSemaphore.initVulkan(semaphore);
    if (!surface->wait(1, &beSemaphore)) {
        ERRORF(reporter, "Failed to add wait semaphore to surface");
        fVkDestroySemaphore(fDevice, semaphore, nullptr);
        return false;
    }
    return true;
}

sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                                AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fDirectContext.get(),
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
        return nullptr;
    }

    return surface;
}

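// CPU-side helpers: generate a deterministic source pattern and compare it against readback.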
static SkPMColor get_src_color(int x, int y) {
    SkASSERT(x >= 0 && x < DEV_W);
    SkASSERT(y >= 0 && y < DEV_H);

    U8CPU r = x;
    U8CPU g = y;
    U8CPU b = 0xc;

    U8CPU a = 0xff;
    switch ((x+y) % 5) {
        case 0:
            a = 0xff;
            break;
        case 1:
            a = 0x80;
            break;
        case 2:
            a = 0xCC;
            break;
        case 4:
            a = 0x01;
            break;
        case 3:
            a = 0x00;
            break;
    }
    // Force full alpha so the pattern stays opaque regardless of the switch above.
    a = 0xff;
    return SkPremultiplyARGBInline(a, r, g, b);
}

static SkBitmap make_src_bitmap() {
    static SkBitmap bmp;
    if (bmp.isNull()) {
        bmp.allocN32Pixels(DEV_W, DEV_H);
        intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
                        pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
                *pixel = get_src_color(x, y);
            }
        }
    }
    return bmp;
}

static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
                       const SkBitmap& dstBitmap) {
    bool result = true;
    for (int y = 0; y < DEV_H && result; ++y) {
        for (int x = 0; x < DEV_W && result; ++x) {
            const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
            const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
            if (srcPixel != dstPixel) {
                ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
                result = false;
            }
        }
    }
    return result;
}

static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
                              AHardwareBuffer* buffer) {
    if (srcHelper) {
        srcHelper->cleanup();
    }
    if (dstHelper) {
        dstHelper->cleanup();
    }
    if (buffer) {
        AHardwareBuffer_release(buffer);
    }
}

enum class SrcType {
    kCPU,
    kEGL,
    kVulkan,
};

enum class DstType {
    kEGL,
    kVulkan,
};

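// Core test flow: fill the AHardwareBuffer from the chosen source (a CPU upload or a GPU
// draw through the source helper), import the same buffer into the destination backend,
// draw it into a fresh surface, and verify the readback. With shareSyncs the handoff is
// ordered by a native fence FD exported by the source and waited on by the destination
// instead of a client-side sync.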
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
#ifdef SK_GL
        srcHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "SrcType::kEGL used without OpenGL support.");
#endif
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
#ifdef SK_GL
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "DstType::kEGL used without OpenGL support.");
#endif
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three are not used by AHardwareBuffer_allocate.
    hwbDesc.stride = 0;
    hwbDesc.rfu0 = 0;
    hwbDesc.rfu1 = 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocate hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get the actual desc for the allocated buffer so we know the stride for uploading
        // cpu data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        int bbp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bbp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);
    } else {
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImage::MakeFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it forces
        // synchronization to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
        } else {
            surface.reset();
            srcHelper->doClientSync();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into the dst backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);

    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    auto direct = dstHelper->directContext();

    // Make an SkSurface to render the wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurface::MakeRenderTarget(direct,
                                                           SkBudgeted::kNo, imageInfo, 0,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstSurf.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    dstSurf.reset();
    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}

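// Test matrix: source (CPU, EGL, Vulkan) x destination (EGL, Vulkan), with and without
// shared sync FDs. The EGL variants are only built when SK_GL is defined.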
DEF_GPUTEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}

#if defined(SK_GL)
DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_CPU_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}
#endif

#endif  // SK_SUPPORT_GPU && defined(SK_BUILD_FOR_ANDROID) &&
        // __ANDROID_API__ >= 26 && defined(SK_VULKAN)