/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work

#include "include/core/SkTypes.h"

#if SK_SUPPORT_GPU && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)

#include "include/core/SkCanvas.h"
#include "include/core/SkImage.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrBackendSemaphore.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/vk/GrVkBackendContext.h"
#include "include/gpu/vk/GrVkExtensions.h"
#include "src/core/SkAutoMalloc.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/SkGr.h"
#include "src/gpu/gl/GrGLDefines.h"
#include "src/gpu/gl/GrGLUtil.h"
#include "tests/Test.h"
#include "tools/gpu/GrContextFactory.h"
#include "tools/gpu/vk/VkTestUtils.h"

#include <android/hardware_buffer.h>
#include <cinttypes>

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

static const int DEV_W = 16, DEV_H = 16;

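// Common interface for the backend-specific helpers below. Each helper owns a context for its
// backend, can import an AHardwareBuffer as either a readable SkImage or a writable SkSurface,
// and can export/import a sync fd so the two backends can be ordered without a CPU stall.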
class BaseTestHelper {
public:
    virtual ~BaseTestHelper() {}

    virtual bool init(skiatest::Reporter* reporter) = 0;

    virtual void cleanup() = 0;
    virtual void releaseImage() = 0;

    virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                       AHardwareBuffer* buffer) = 0;
    virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) = 0;

    virtual void doClientSync() = 0;
    virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
    virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                          sk_sp<SkSurface>) = 0;

    virtual void makeCurrent() = 0;

    virtual GrDirectContext* directContext() = 0;

    int getFdHandle() { return fFdHandle; }

protected:
    BaseTestHelper() {}

    int fFdHandle = 0;
};

#ifdef SK_GL
class EGLTestHelper : public BaseTestHelper {
public:
    EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}

    ~EGLTestHelper() override {}

    void releaseImage() override {
        this->makeCurrent();
        if (!fGLCtx) {
            return;
        }
        if (EGL_NO_IMAGE_KHR != fImage) {
            fGLCtx->destroyEGLImage(fImage);
            fImage = EGL_NO_IMAGE_KHR;
        }
        if (fTexID) {
            GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
            fTexID = 0;
        }
    }

    void cleanup() override {
        this->releaseImage();
    }

    bool init(skiatest::Reporter* reporter) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;
    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void doClientSync() override;
    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    void makeCurrent() override { fGLCtx->makeCurrent(); }

    GrDirectContext* directContext() override { return fDirectContext; }

private:
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);

    typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
    typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
                                                 const EGLint*);
    typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
    EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
    EGLCreateImageKHRProc fEGLCreateImageKHR;
    EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;

    PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
    PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
    PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
    PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
    PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;

    EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
    GrGLuint fTexID = 0;

    sk_gpu_test::GrContextFactory fFactory;
    sk_gpu_test::ContextInfo fGLESContextInfo;

    sk_gpu_test::GLTestContext* fGLCtx = nullptr;
    GrDirectContext* fDirectContext = nullptr;
};

bool EGLTestHelper::init(skiatest::Reporter* reporter) {
    fGLESContextInfo = fFactory.getContextInfo(sk_gpu_test::GrContextFactory::kGLES_ContextType);
    fDirectContext = fGLESContextInfo.directContext();
    fGLCtx = fGLESContextInfo.glContext();
    if (!fDirectContext || !fGLCtx) {
        return false;
    }

    if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
        return false;
    }

    // Confirm we have EGL and the needed extensions
    if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
        !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_native_fence_sync")) {
        return false;
    }

    fEGLGetNativeClientBufferANDROID =
        (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
    if (!fEGLGetNativeClientBufferANDROID) {
        ERRORF(reporter, "Failed to get the proc eglGetNativeClientBufferANDROID");
        return false;
    }

    fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
    if (!fEGLCreateImageKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
        return false;
    }

    fEGLImageTargetTexture2DOES =
        (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
    if (!fEGLImageTargetTexture2DOES) {
        ERRORF(reporter, "Failed to get the proc glEGLImageTargetTexture2DOES");
        return false;
    }

    fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
    if (!fEGLCreateSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
        return false;
    }
    fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
    if (!fEGLWaitSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
        return false;
    }
    fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
    if (!fEGLGetSyncAttribKHR) {
        ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
        return false;
    }
    fEGLDupNativeFenceFDANDROID =
        (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
    if (!fEGLDupNativeFenceFDANDROID) {
        ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
        return false;
    }
    fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
    if (!fEGLDestroySyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
        return false;
    }

    return true;
}

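// Wraps the AHardwareBuffer in an EGLImage and binds that image to a newly created GL texture:
// AHardwareBuffer -> EGLClientBuffer -> EGLImageKHR -> GL_TEXTURE_2D.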
bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
    while (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {}

    EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
    EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                            EGL_NONE };
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
                                EGL_NATIVE_BUFFER_ANDROID,
                                eglClientBuffer, eglAttribs);
    if (EGL_NO_IMAGE_KHR == fImage) {
        SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError());
        return false;
    }

    GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
    if (!fTexID) {
        ERRORF(reporter, "Failed to create GL Texture");
        return false;
    }
    GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
    if (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {
        ERRORF(reporter, "Failed to bind GL Texture");
        return false;
    }

    fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
    if (GrGLenum error = fGLCtx->gl()->fFunctions.fGetError(); error != GR_GL_NO_ERROR) {
        ERRORF(reporter, "EGLImageTargetTexture2DOES failed (%#x)", (int) error);
        return false;
    }

    fDirectContext->resetContext(kTextureBinding_GrGLBackendState);
    return true;
}

sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkImage> image = SkImage::MakeFromTexture(fDirectContext,
                                                    backendTex,
                                                    kTopLeft_GrSurfaceOrigin,
                                                    kRGBA_8888_SkColorType,
                                                    kPremul_SkAlphaType,
                                                    nullptr);

    if (!image) {
        ERRORF(reporter, "Failed to make wrapped GL SkImage");
        return nullptr;
    }

    return image;
}

sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fDirectContext,
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface) {
        ERRORF(reporter, "Failed to make wrapped GL SkSurface");
        return nullptr;
    }

    return surface;
}

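// Flushes the GL work on the surface and exports an Android native fence fd for it. The fd is
// stashed in fFdHandle so the destination helper can later import it as a wait semaphore.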
bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                   sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
        return false;
    }

    surface->flushAndSubmit();
    GR_GL_CALL(fGLCtx->gl(), Flush());
    fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);

    EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }

    return true;
}

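// Recreates an EGLSync from the native fence fd exported by the source helper and issues a
// server-side wait on it so subsequent GL commands are ordered after the producer's work.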
bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                             sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLint attr[] = {
        EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
        EGL_NONE
    };
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter,
               "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
        return false;
    }
    EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed call to eglWaitSyncKHR, error: %d\n", result);
        // Don't return false yet, try to delete the sync first
    }
    result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }
    return true;
}

void EGLTestHelper::doClientSync() {
    this->directContext()->flush();
    this->directContext()->submit(true);
}
#endif  // SK_GL

#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

#define ACQUIRE_INST_VK_PROC(name)                                                                \
    do {                                                                                          \
        fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance, \
                                                           VK_NULL_HANDLE));                      \
        if (fVk##name == nullptr) {                                                               \
            ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);             \
            return false;                                                                         \
        }                                                                                         \
    } while(false)

#define ACQUIRE_DEVICE_VK_PROC(name)                                                              \
    do {                                                                                          \
        fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
        if (fVk##name == nullptr) {                                                               \
            ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);             \
            return false;                                                                         \
        }                                                                                         \
    } while(false)

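// Vulkan version of the helper. Unlike the EGL helper, it creates its own VkInstance/VkDevice via
// sk_gpu_test::CreateVkBackendContext and wraps them in a GrDirectContext.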
class VulkanTestHelper : public BaseTestHelper {
public:
    VulkanTestHelper() {}

    ~VulkanTestHelper() override {}

    void releaseImage() override {
        if (VK_NULL_HANDLE == fDevice) {
            return;
        }
        if (fImage != VK_NULL_HANDLE) {
            fVkDestroyImage(fDevice, fImage, nullptr);
            fImage = VK_NULL_HANDLE;
        }

        if (fMemory != VK_NULL_HANDLE) {
            fVkFreeMemory(fDevice, fMemory, nullptr);
            fMemory = VK_NULL_HANDLE;
        }
    }
    void cleanup() override {
        fDirectContext.reset();
        this->releaseImage();
        if (fSignalSemaphore != VK_NULL_HANDLE) {
            fVkDestroySemaphore(fDevice, fSignalSemaphore, nullptr);
            fSignalSemaphore = VK_NULL_HANDLE;
        }
        fBackendContext.fMemoryAllocator.reset();
        if (fDevice != VK_NULL_HANDLE) {
            fVkDeviceWaitIdle(fDevice);
            fVkDestroyDevice(fDevice, nullptr);
            fDevice = VK_NULL_HANDLE;
        }
#ifdef SK_ENABLE_VK_LAYERS
        if (fDebugCallback != VK_NULL_HANDLE) {
            fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
        }
#endif
        if (fBackendContext.fInstance != VK_NULL_HANDLE) {
            fVkDestroyInstance(fBackendContext.fInstance, nullptr);
            fBackendContext.fInstance = VK_NULL_HANDLE;
        }

        delete fExtensions;

        sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
        delete fFeatures;
    }

    bool init(skiatest::Reporter* reporter) override;

    void doClientSync() override {
        if (!fDirectContext) {
            return;
        }

        fDirectContext->priv().getGpu()->testingOnly_flushGpuAndSync();
    }

    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;

    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void makeCurrent() override {}

    GrDirectContext* directContext() override { return fDirectContext.get(); }

private:
    bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);

    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
                              GrVkImageInfo* outImageInfo);

    bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
    bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);

    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);

    DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);

    DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements2);
    DECLARE_VK_PROC(DestroyImage);

    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(BindImageMemory2);
    DECLARE_VK_PROC(FreeMemory);

    DECLARE_VK_PROC(CreateSemaphore);
    DECLARE_VK_PROC(GetSemaphoreFdKHR);
    DECLARE_VK_PROC(ImportSemaphoreFdKHR);
    DECLARE_VK_PROC(DestroySemaphore);

    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fMemory = VK_NULL_HANDLE;

    GrVkExtensions* fExtensions = nullptr;
    VkPhysicalDeviceFeatures2* fFeatures = nullptr;
    VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
    PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;

    // We hold on to the semaphore so we can delete it once the GPU is done.
    VkSemaphore fSignalSemaphore = VK_NULL_HANDLE;

    VkDevice fDevice = VK_NULL_HANDLE;

    GrVkBackendContext fBackendContext;
    sk_sp<GrDirectContext> fDirectContext;
};

bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
    PFN_vkGetInstanceProcAddr instProc;
    PFN_vkGetDeviceProcAddr devProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
        return false;
    }
    auto getProc = [&instProc, &devProc](const char* proc_name,
                                         VkInstance instance, VkDevice device) {
        if (device != VK_NULL_HANDLE) {
            return devProc(device, proc_name);
        }
        return instProc(instance, proc_name);
    };

    fExtensions = new GrVkExtensions();
    fFeatures = new VkPhysicalDeviceFeatures2;
    memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
    fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    fFeatures->pNext = nullptr;

    fBackendContext.fInstance = VK_NULL_HANDLE;
    fBackendContext.fDevice = VK_NULL_HANDLE;

    if (!sk_gpu_test::CreateVkBackendContext(getProc, &fBackendContext, fExtensions,
                                             fFeatures, &fDebugCallback)) {
        return false;
    }
    fDevice = fBackendContext.fDevice;

    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
                fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
    }

    ACQUIRE_INST_VK_PROC(DestroyInstance);
    ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
    ACQUIRE_INST_VK_PROC(DestroyDevice);

    if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                                   2)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
        // return false;
    }

    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);

    ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    ACQUIRE_DEVICE_VK_PROC(CreateImage);
    ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
    ACQUIRE_DEVICE_VK_PROC(DestroyImage);

    ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
    ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
    ACQUIRE_DEVICE_VK_PROC(FreeMemory);

    ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
    ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);

    fDirectContext = GrDirectContext::MakeVulkan(fBackendContext);
    REPORTER_ASSERT(reporter, fDirectContext.get());
    if (!fDirectContext) {
        return false;
    }

    return this->checkOptimalHardwareBuffer(reporter);
}

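// Queries vkGetPhysicalDeviceImageFormatProperties2 for an optimally tiled RGBA8 image backed by
// an AHardwareBuffer and verifies the test dimensions and the import/dedicated-allocation
// features are supported before any buffers are created.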
bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
    VkResult err;

    VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
    externalImageFormatInfo.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
    externalImageFormatInfo.pNext = nullptr;
    externalImageFormatInfo.handleType =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    //externalImageFormatInfo.handType = 0x80;

    // We will create the hardware buffer with GPU-sampled usage, so these usages should all be
    // valid.
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
    imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
    imageFormatInfo.pNext = &externalImageFormatInfo;
    imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageFormatInfo.type = VK_IMAGE_TYPE_2D;
    imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageFormatInfo.usage = usageFlags;
    imageFormatInfo.flags = 0;

    VkAndroidHardwareBufferUsageANDROID hwbUsage;
    hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
    hwbUsage.pNext = nullptr;

    VkExternalImageFormatProperties externalImgFormatProps;
    externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
    externalImgFormatProps.pNext = &hwbUsage;

    VkImageFormatProperties2 imgFormProps;
    imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
    imgFormProps.pNext = &externalImgFormatProps;

    err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
                                                     &imageFormatInfo, &imgFormProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperties2 failed, err: %d", err);
        return false;
    }

    const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
    REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
    REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);

    const VkExternalMemoryProperties& externalImageFormatProps =
            externalImgFormatProps.externalMemoryProperties;
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));

    REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
                                       hwbUsage.androidHardwareBufferUsage));

    return true;
}

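// Imports the AHardwareBuffer into Vulkan: query its properties, create a VkImage that lists the
// AHardwareBuffer external-memory handle type, find a device-local memory type allowed by the
// buffer, perform a dedicated allocation that imports the buffer, and bind it to the image. The
// resulting handles are returned to the caller through a GrVkImageInfo.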
bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
                                            AHardwareBuffer* buffer,
                                            bool forWrite,
                                            GrVkImageInfo* outImageInfo) {
    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "GetAndroidHardwareBufferPropertiesANDROID failed, err: %d", err);
        return false;
    }

    REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
    REPORTER_ASSERT(reporter,
            SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
    if (forWrite) {
        REPORTER_ASSERT(reporter,
                SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));
    }

    bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
    const VkExternalFormatANDROID externalFormatInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,              // sType
        nullptr,                                                // pNext
        useExternalFormat ? hwbFormatProps.externalFormat : 0,  // externalFormat
    };

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        &externalFormatInfo,                                                 // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    if (forWrite) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,  // sType
        &externalMemoryImageInfo,             // pNext
        0,                                    // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                     // VkImageType
        hwbFormatProps.format,                // VkFormat
        { DEV_W, DEV_H, 1 },                  // VkExtent3D
        1,                                    // mipLevels
        1,                                    // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                // samples
        VK_IMAGE_TILING_OPTIMAL,              // VkImageTiling
        usageFlags,                           // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode
        0,                                    // queueFamilyCount
        0,                                    // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,            // initialLayout
    };

    err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Create Image failed, err: %d", err);
        return false;
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        ERRORF(reporter, "Failed to find valid heap for imported memory");
        return false;
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = buffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = fImage;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
        &dedicatedAllocInfo,                     // pNext
        hwbProps.allocationSize,                 // allocationSize
        typeIndex,                               // memoryTypeIndex
    };

    err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
        return false;
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = fImage;
    bindImageInfo.memory = fMemory;
    bindImageInfo.memoryOffset = 0;

    err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
        return false;
    }

    outImageInfo->fImage = fImage;
    outImageInfo->fAlloc = GrVkAlloc(fMemory, 0, hwbProps.allocationSize, 0);
    outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
    outImageInfo->fLevelCount = 1;
    outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    return true;
}

sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(fDirectContext.get(),
                                                           backendTex,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           kRGBA_8888_SkColorType,
                                                           kPremul_SkAlphaType,
                                                           nullptr);

    if (!wrappedImage.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
        return nullptr;
    }

    return wrappedImage;
}

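// Flushes the surface's work and signals an exportable binary semaphore as part of that flush,
// then exports the semaphore as a sync fd (see setupSemaphoreForSignaling/exportSemaphore below).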
bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                      sk_sp<SkSurface> surface) {
    surface->flushAndSubmit();
    surface.reset();
    GrBackendSemaphore semaphore;
    if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
        return false;
    }
    GrFlushInfo info;
    info.fNumSemaphores = 1;
    info.fSignalSemaphores = &semaphore;
    GrSemaphoresSubmitted submitted = fDirectContext->flush(info);
    fDirectContext->submit();
    if (GrSemaphoresSubmitted::kNo == submitted) {
        ERRORF(reporter, "Failed call to flush on GrDirectContext");
        return false;
    }
    SkASSERT(semaphore.isInitialized());
    if (!this->exportSemaphore(reporter, semaphore)) {
        return false;
    }
    return true;
}

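// Creates a VkSemaphore that can be exported as a SYNC_FD handle, after first checking that the
// physical device reports export/import support for that handle type.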
bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
                                                  GrBackendSemaphore* beSemaphore) {
    // Query supported info
    VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
    exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
    exSemInfo.pNext = nullptr;
    exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkExternalSemaphoreProperties exSemProps;
    exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
    exSemProps.pNext = nullptr;

    fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
                                                    &exSemProps);

    if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.compatibleHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
        !SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
        return false;
    }

    VkExportSemaphoreCreateInfo exportInfo;
    exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
    exportInfo.pNext = nullptr;
    exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = &exportInfo;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
        return false;
    }
    beSemaphore->initVulkan(semaphore);
    return true;
}

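// Exports the just-signaled semaphore as a sync fd via vkGetSemaphoreFdKHR and stores the fd in
// fFdHandle for the destination helper to import.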
bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
                                       const GrBackendSemaphore& beSemaphore) {
    VkSemaphore semaphore = beSemaphore.vkSemaphore();
    if (VK_NULL_HANDLE == semaphore) {
        ERRORF(reporter, "Invalid vulkan handle in export call");
        return false;
    }

    VkSemaphoreGetFdInfoKHR getFdInfo;
    getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
    getFdInfo.pNext = nullptr;
    getFdInfo.semaphore = semaphore;
    getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
        return false;
    }
    fSignalSemaphore = semaphore;
    return true;
}

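// Creates a new VkSemaphore, imports the sync fd into it (temporary import), and hands it to the
// destination surface as a wait semaphore so the GPU waits for the producer before sampling.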
bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                                sk_sp<SkSurface> surface) {
    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = nullptr;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
        return false;
    }

    VkImportSemaphoreFdInfoKHR importInfo;
    importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
    importInfo.pNext = nullptr;
    importInfo.semaphore = semaphore;
    importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
    importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
    importInfo.fd = fdHandle;

    err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to import semaphore, err: %d", err);
        return false;
    }

    GrBackendSemaphore beSemaphore;
    beSemaphore.initVulkan(semaphore);
    if (!surface->wait(1, &beSemaphore)) {
        ERRORF(reporter, "Failed to add wait semaphore to surface");
        fVkDestroySemaphore(fDevice, semaphore, nullptr);
        return false;
    }
    return true;
}

sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                                AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fDirectContext.get(),
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
        return nullptr;
    }

    return surface;
}

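// Test pattern helpers: a deterministic 16x16 RGBA pattern that is uploaded through the source
// backend and then compared pixel-for-pixel after readback from the destination backend.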
static SkPMColor get_src_color(int x, int y) {
    SkASSERT(x >= 0 && x < DEV_W);
    SkASSERT(y >= 0 && y < DEV_H);

    U8CPU r = x;
    U8CPU g = y;
    U8CPU b = 0xc;

    U8CPU a = 0xff;
    switch ((x+y) % 5) {
        case 0:
            a = 0xff;
            break;
        case 1:
            a = 0x80;
            break;
        case 2:
            a = 0xCC;
            break;
        case 4:
            a = 0x01;
            break;
        case 3:
            a = 0x00;
            break;
    }
    a = 0xff;
    return SkPremultiplyARGBInline(a, r, g, b);
}

static SkBitmap make_src_bitmap() {
    static SkBitmap bmp;
    if (bmp.isNull()) {
        bmp.allocN32Pixels(DEV_W, DEV_H);
        intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
                        pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
                *pixel = get_src_color(x, y);
            }
        }
    }
    return bmp;
}

static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
                       const SkBitmap& dstBitmap) {
    bool result = true;
    for (int y = 0; y < DEV_H && result; ++y) {
        for (int x = 0; x < DEV_W && result; ++x) {
            const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
            const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
            if (srcPixel != dstPixel) {
                ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
                result = false;
            } /*else {
                ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
            }*/
        }
    }
    return result;
}

static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
                              AHardwareBuffer* buffer) {
    if (srcHelper) {
        srcHelper->cleanup();
    }
    if (dstHelper) {
        dstHelper->cleanup();
    }
    if (buffer) {
        AHardwareBuffer_release(buffer);
    }
}

enum class SrcType {
    kCPU,
    kEGL,
    kVulkan,
};

enum class DstType {
    kEGL,
    kVulkan,
};

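// Core test body: write the source bitmap into an AHardwareBuffer using srcType (CPU upload, GL
// via EGL, or Vulkan), then import that same buffer into dstType and read it back, verifying the
// contents. When shareSyncs is true the handoff is ordered with an exported/imported sync fd
// instead of a full client-side sync.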
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
#ifdef SK_GL
        srcHelper.reset(new EGLTestHelper(options));
#else
        SkDEBUGFAIL("SrcType::kEGL used without OpenGL support.");
#endif
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
#ifdef SK_GL
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
#else
        SkDEBUGFAIL("DstType::kEGL used without OpenGL support.");
#endif
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three are not used in the allocate
    hwbDesc.stride = 0;
    hwbDesc.rfu0 = 0;
    hwbDesc.rfu1 = 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocate hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get the actual desc for the allocated buffer so we know the stride for uploading cpu data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        int bbp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bbp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);

    } else {
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImage::MakeFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it forces synchronization
        // to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
        } else {
            surface.reset();
            srcHelper->doClientSync();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);

    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    auto direct = dstHelper->directContext();

    // Make SkSurface to render wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurface::MakeRenderTarget(direct,
                                                           SkBudgeted::kNo, imageInfo, 0,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstSurf.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    dstSurf.reset();
    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}

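// Each test below is named VulkanHardwareBuffer_<source>_<destination>, with a _Syncs suffix for
// the variants that hand off ownership via shared sync fds. The EGL variants are only compiled
// when SK_GL is defined.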
DEF_GPUTEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}

#if defined(SK_GL)
DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_CPU_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}
#endif

#endif  // SK_SUPPORT_GPU && defined(SK_BUILD_FOR_ANDROID) &&
        // __ANDROID_API__ >= 26 && defined(SK_VULKAN)