/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work.
#include "include/core/SkTypes.h"

#if SK_SUPPORT_GPU && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)

#include "include/core/SkCanvas.h"
#include "include/core/SkImage.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrBackendSemaphore.h"
#include "include/gpu/GrContext.h"
#include "include/gpu/vk/GrVkBackendContext.h"
#include "include/gpu/vk/GrVkExtensions.h"
#include "src/core/SkAutoMalloc.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/SkGr.h"
#include "src/gpu/gl/GrGLDefines.h"
#include "src/gpu/gl/GrGLUtil.h"
#include "tests/Test.h"
#include "tools/gpu/GrContextFactory.h"
#include "tools/gpu/vk/VkTestUtils.h"

#include <android/hardware_buffer.h>
#include <cinttypes>

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

static const int DEV_W = 16, DEV_H = 16;

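// BaseTestHelper abstracts the backend-specific work each test case needs: importing an
// AHardwareBuffer as a readable SkImage or a writable SkSurface, signaling and waiting on
// fence FDs, and tearing the backend down again.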
class BaseTestHelper {
public:
    virtual ~BaseTestHelper() {}

    virtual bool init(skiatest::Reporter* reporter) = 0;

    virtual void cleanup() = 0;
    virtual void releaseImage() = 0;

    virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                       AHardwareBuffer* buffer) = 0;
    virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) = 0;

    virtual void doClientSync() = 0;
    virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
    virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                          sk_sp<SkSurface>) = 0;

    virtual void makeCurrent() = 0;

    virtual GrContext* grContext() = 0;

    int getFdHandle() { return fFdHandle; }

protected:
    BaseTestHelper() {}

    int fFdHandle = 0;
};

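// GL/EGL implementation: wraps the AHardwareBuffer in an EGLImage bound to a GL texture and
// uses EGL_ANDROID_native_fence_sync fence FDs for cross-context synchronization.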
class EGLTestHelper : public BaseTestHelper {
public:
    EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}

    ~EGLTestHelper() override {}

    void releaseImage() override {
        this->makeCurrent();
        if (!fGLCtx) {
            return;
        }
        if (EGL_NO_IMAGE_KHR != fImage) {
            fGLCtx->destroyEGLImage(fImage);
            fImage = EGL_NO_IMAGE_KHR;
        }
        if (fTexID) {
            GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
            fTexID = 0;
        }
    }

    void cleanup() override {
        this->releaseImage();
    }

    bool init(skiatest::Reporter* reporter) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;
    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void doClientSync() override;
    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    void makeCurrent() override { fGLCtx->makeCurrent(); }

    GrContext* grContext() override { return fGrContext; }

private:
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);

    typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
    typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
                                                 const EGLint*);
    typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
    EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
    EGLCreateImageKHRProc fEGLCreateImageKHR;
    EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;

    PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
    PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
    PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
    PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
    PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;

    EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
    GrGLuint fTexID = 0;

    sk_gpu_test::GrContextFactory fFactory;
    sk_gpu_test::ContextInfo fGLESContextInfo;

    sk_gpu_test::GLTestContext* fGLCtx = nullptr;
    GrContext* fGrContext = nullptr;
};

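// Create a GLES context and resolve the EGL/GL extension entry points the test needs. Any
// missing extension or proc address makes init() return false so the test is skipped rather
// than failed.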
bool EGLTestHelper::init(skiatest::Reporter* reporter) {
    fGLESContextInfo = fFactory.getContextInfo(sk_gpu_test::GrContextFactory::kGLES_ContextType);
    fGrContext = fGLESContextInfo.grContext();
    fGLCtx = fGLESContextInfo.glContext();
    if (!fGrContext || !fGLCtx) {
        return false;
    }

    if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
        return false;
    }

    // Confirm we have EGL and the needed extensions.
    if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
        !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_native_fence_sync")) {
        return false;
    }

    fEGLGetNativeClientBufferANDROID =
        (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
    if (!fEGLGetNativeClientBufferANDROID) {
        ERRORF(reporter, "Failed to get the proc eglGetNativeClientBufferANDROID");
        return false;
    }

    fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
    if (!fEGLCreateImageKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
        return false;
    }

    fEGLImageTargetTexture2DOES =
        (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
    if (!fEGLImageTargetTexture2DOES) {
        ERRORF(reporter, "Failed to get the proc glEGLImageTargetTexture2DOES");
        return false;
    }

    fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
    if (!fEGLCreateSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
        return false;
    }
    fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
    if (!fEGLWaitSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
        return false;
    }
    fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
    if (!fEGLGetSyncAttribKHR) {
        ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
        return false;
    }
    fEGLDupNativeFenceFDANDROID =
        (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
    if (!fEGLDupNativeFenceFDANDROID) {
        ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
        return false;
    }
    fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
    if (!fEGLDestroySyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
        return false;
    }

    return true;
}

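// Turn the AHardwareBuffer into an EGLImage and bind it to a freshly created GL texture via
// glEGLImageTargetTexture2DOES. The GrContext's texture-binding state is reset afterwards
// because the binding happened behind Skia's back.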
bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
    GrGLClearErr(fGLCtx->gl());

    EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
    EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                            EGL_NONE };
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
                                EGL_NATIVE_BUFFER_ANDROID,
                                eglClientBuffer, eglAttribs);
    if (EGL_NO_IMAGE_KHR == fImage) {
        SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError());
        return false;
    }

    GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
    if (!fTexID) {
        ERRORF(reporter, "Failed to create GL Texture");
        return false;
    }
    GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
    if (GR_GL_GET_ERROR(fGLCtx->gl()) != GR_GL_NO_ERROR) {
        ERRORF(reporter, "Failed to bind GL Texture");
        return false;
    }

    fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        ERRORF(reporter, "EGLImageTargetTexture2DOES failed (%#x)", (int) status);
        return false;
    }

    fGrContext->resetContext(kTextureBinding_GrGLBackendState);
    return true;
}

sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkImage> image = SkImage::MakeFromTexture(fGrContext,
                                                    backendTex,
                                                    kTopLeft_GrSurfaceOrigin,
                                                    kRGBA_8888_SkColorType,
                                                    kPremul_SkAlphaType,
                                                    nullptr);

    if (!image) {
        ERRORF(reporter, "Failed to make wrapped GL SkImage");
        return nullptr;
    }

    return image;
}

sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext,
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface) {
        ERRORF(reporter, "Failed to make wrapped GL SkSurface");
        return nullptr;
    }

    return surface;
}

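// Flush the surface's pending GL work and export an EGL_SYNC_NATIVE_FENCE_ANDROID fence as a
// file descriptor (stored in fFdHandle) so another backend can wait on it.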
bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                   sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
        return false;
    }

    surface->flush();
    GR_GL_CALL(fGLCtx->gl(), Flush());
    fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);

    EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }

    return true;
}

bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                             sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLint attr[] = {
        EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
        EGL_NONE
    };
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter,
               "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
        return false;
    }
    EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed call to eglWaitSyncKHR, error: %d\n", result);
        // Don't return false yet; try to delete the sync first.
    }
    result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }
    return true;
}

void EGLTestHelper::doClientSync() {
    sk_gpu_test::FenceSync* fenceSync = fGLCtx->fenceSync();
    sk_gpu_test::PlatformFence fence = fenceSync->insertFence();
    fenceSync->waitFence(fence);
    fenceSync->deleteFence(fence);
}

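// Helpers to declare Vulkan function pointers and resolve them through getProc, split into
// instance-level and device-level lookups.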
#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

#define ACQUIRE_INST_VK_PROC(name)                                                            \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance, \
                                                       VK_NULL_HANDLE));                      \
    if (fVk##name == nullptr) {                                                               \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);             \
        return false;                                                                         \
    }

#define ACQUIRE_DEVICE_VK_PROC(name)                                                          \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
    if (fVk##name == nullptr) {                                                               \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);             \
        return false;                                                                         \
    }

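// Vulkan implementation: imports the AHardwareBuffer as a dedicated VkDeviceMemory allocation
// bound to a VkImage, and shares fences as sync FDs via VK_KHR_external_semaphore_fd.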
class VulkanTestHelper : public BaseTestHelper {
public:
    VulkanTestHelper() {}

    ~VulkanTestHelper() override {}

    void releaseImage() override {
        if (VK_NULL_HANDLE == fDevice) {
            return;
        }
        if (fImage != VK_NULL_HANDLE) {
            fVkDestroyImage(fDevice, fImage, nullptr);
            fImage = VK_NULL_HANDLE;
        }

        if (fMemory != VK_NULL_HANDLE) {
            fVkFreeMemory(fDevice, fMemory, nullptr);
            fMemory = VK_NULL_HANDLE;
        }
    }

    void cleanup() override {
        fGrContext.reset();
        this->releaseImage();
        if (fSignalSemaphore != VK_NULL_HANDLE) {
            fVkDestroySemaphore(fDevice, fSignalSemaphore, nullptr);
            fSignalSemaphore = VK_NULL_HANDLE;
        }
        fBackendContext.fMemoryAllocator.reset();
        if (fDevice != VK_NULL_HANDLE) {
            fVkDeviceWaitIdle(fDevice);
            fVkDestroyDevice(fDevice, nullptr);
            fDevice = VK_NULL_HANDLE;
        }
#ifdef SK_ENABLE_VK_LAYERS
        if (fDebugCallback != VK_NULL_HANDLE) {
            fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
        }
#endif
        if (fBackendContext.fInstance != VK_NULL_HANDLE) {
            fVkDestroyInstance(fBackendContext.fInstance, nullptr);
            fBackendContext.fInstance = VK_NULL_HANDLE;
        }

        delete fExtensions;

        sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
        delete fFeatures;
    }

    bool init(skiatest::Reporter* reporter) override;

    void doClientSync() override {
        if (!fGrContext) {
            return;
        }

        fGrContext->priv().getGpu()->testingOnly_flushGpuAndSync();
    }

    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;

    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void makeCurrent() override {}

    GrContext* grContext() override { return fGrContext.get(); }

private:
    bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);

    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
                              GrVkImageInfo* outImageInfo);

    bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
    bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);

    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);

    DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);

    DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements2);
    DECLARE_VK_PROC(DestroyImage);

    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(BindImageMemory2);
    DECLARE_VK_PROC(FreeMemory);

    DECLARE_VK_PROC(CreateSemaphore);
    DECLARE_VK_PROC(GetSemaphoreFdKHR);
    DECLARE_VK_PROC(ImportSemaphoreFdKHR);
    DECLARE_VK_PROC(DestroySemaphore);

    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fMemory = VK_NULL_HANDLE;

    GrVkExtensions* fExtensions = nullptr;
    VkPhysicalDeviceFeatures2* fFeatures = nullptr;
    VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
    PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;

    // We hold on to the semaphore so we can delete it once the GPU is done.
    VkSemaphore fSignalSemaphore = VK_NULL_HANDLE;

    VkDevice fDevice = VK_NULL_HANDLE;

    GrVkBackendContext fBackendContext;
    sk_sp<GrContext> fGrContext;
};

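// Load the Vulkan library, create a backend context, and resolve every proc the test needs.
// The required device extensions (external memory for AHardwareBuffers, YCbCr conversion,
// external semaphore FDs) are checked up front so unsupported devices simply skip the test.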
bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
    PFN_vkGetInstanceProcAddr instProc;
    PFN_vkGetDeviceProcAddr devProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
        return false;
    }
    auto getProc = [&instProc, &devProc](const char* proc_name,
                                         VkInstance instance, VkDevice device) {
        if (device != VK_NULL_HANDLE) {
            return devProc(device, proc_name);
        }
        return instProc(instance, proc_name);
    };

    fExtensions = new GrVkExtensions();
    fFeatures = new VkPhysicalDeviceFeatures2;
    memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
    fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    fFeatures->pNext = nullptr;

    fBackendContext.fInstance = VK_NULL_HANDLE;
    fBackendContext.fDevice = VK_NULL_HANDLE;

    if (!sk_gpu_test::CreateVkBackendContext(getProc, &fBackendContext, fExtensions,
                                             fFeatures, &fDebugCallback)) {
        return false;
    }
    fDevice = fBackendContext.fDevice;

    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
                fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
    }

    ACQUIRE_INST_VK_PROC(DestroyInstance);
    ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
    ACQUIRE_INST_VK_PROC(DestroyDevice);

    if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                                   2)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
        // return false;
    }

    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);

    ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    ACQUIRE_DEVICE_VK_PROC(CreateImage);
    ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
    ACQUIRE_DEVICE_VK_PROC(DestroyImage);

    ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
    ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
    ACQUIRE_DEVICE_VK_PROC(FreeMemory);

    ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
    ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);

    fGrContext = GrContext::MakeVulkan(fBackendContext);
    REPORTER_ASSERT(reporter, fGrContext.get());
    if (!fGrContext) {
        return false;
    }

    return this->checkOptimalHardwareBuffer(reporter);
}

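// Query vkGetPhysicalDeviceImageFormatProperties2 for an optimally tiled RGBA8 image backed by
// an AHardwareBuffer and assert that the device can import it at the test's dimensions with the
// sampled/transfer usages the test relies on.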
bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
    VkResult err;

    VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
    externalImageFormatInfo.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
    externalImageFormatInfo.pNext = nullptr;
    externalImageFormatInfo.handleType =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    //externalImageFormatInfo.handType = 0x80;

    // We will create the hardware buffer with GPU-sampled usage, so these usages should all be
    // valid.
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
    imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
    imageFormatInfo.pNext = &externalImageFormatInfo;
    imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageFormatInfo.type = VK_IMAGE_TYPE_2D;
    imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageFormatInfo.usage = usageFlags;
    imageFormatInfo.flags = 0;

    VkAndroidHardwareBufferUsageANDROID hwbUsage;
    hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
    hwbUsage.pNext = nullptr;

    VkExternalImageFormatProperties externalImgFormatProps;
    externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
    externalImgFormatProps.pNext = &hwbUsage;

    VkImageFormatProperties2 imgFormProps;
    imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
    imgFormProps.pNext = &externalImgFormatProps;

    err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
                                                     &imageFormatInfo, &imgFormProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperties failed, err: %d", err);
        return false;
    }

    const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
    REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
    REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);

    const VkExternalMemoryProperties& externalImageFormatProps =
            externalImgFormatProps.externalMemoryProperties;
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));

    REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
                                       hwbUsage.androidHardwareBufferUsage));

    return true;
}

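// Import the AHardwareBuffer into Vulkan: query its format properties, create a matching VkImage
// with external-memory info chained in, pick a DEVICE_LOCAL memory type allowed by the buffer,
// make a dedicated allocation that imports the buffer, and bind it to the image.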
bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
                                            AHardwareBuffer* buffer,
                                            bool forWrite,
                                            GrVkImageInfo* outImageInfo) {
    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "GetAndroidHardwareBufferPropertiesAndroid failed, err: %d", err);
        return false;
    }

    REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
    REPORTER_ASSERT(reporter,
            SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
    if (forWrite) {
        REPORTER_ASSERT(reporter,
                SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));
    }

    bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
    const VkExternalFormatANDROID externalFormatInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,              // sType
        nullptr,                                                // pNext
        useExternalFormat ? hwbFormatProps.externalFormat : 0,  // externalFormat
    };

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        &externalFormatInfo,                                                 // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    if (forWrite) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,  // sType
        &externalMemoryImageInfo,             // pNext
        0,                                    // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                     // VkImageType
        hwbFormatProps.format,                // VkFormat
        { DEV_W, DEV_H, 1 },                  // VkExtent3D
        1,                                    // mipLevels
        1,                                    // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                // samples
        VK_IMAGE_TILING_OPTIMAL,              // VkImageTiling
        usageFlags,                           // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode
        0,                                    // queueFamilyCount
        0,                                    // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,            // initialLayout
    };

    err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Create Image failed, err: %d", err);
        return false;
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        ERRORF(reporter, "Failed to find valid heap for imported memory");
        return false;
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = buffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = fImage;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
        &dedicatedAllocInfo,                     // pNext
        hwbProps.allocationSize,                 // allocationSize
        typeIndex,                               // memoryTypeIndex
    };

    err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
        return false;
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = fImage;
    bindImageInfo.memory = fMemory;
    bindImageInfo.memoryOffset = 0;

    err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
        return false;
    }

    outImageInfo->fImage = fImage;
    outImageInfo->fAlloc = GrVkAlloc(fMemory, 0, hwbProps.allocationSize, 0);
    outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
    outImageInfo->fLevelCount = 1;
    outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    return true;
}

sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(fGrContext.get(),
                                                           backendTex,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           kRGBA_8888_SkColorType,
                                                           kPremul_SkAlphaType,
                                                           nullptr);

    if (!wrappedImage.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
        return nullptr;
    }

    return wrappedImage;
}

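// Flush the surface, create an exportable semaphore, have the GrContext flush signal it, and
// export the semaphore as a sync FD for the consuming context to wait on.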
bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                      sk_sp<SkSurface> surface) {
    surface->flush();
    surface.reset();
    GrBackendSemaphore semaphore;
    if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
        return false;
    }
    GrFlushInfo info;
    info.fNumSemaphores = 1;
    info.fSignalSemaphores = &semaphore;
    GrSemaphoresSubmitted submitted = fGrContext->flush(info);
    if (GrSemaphoresSubmitted::kNo == submitted) {
        ERRORF(reporter, "Failing call to flush on GrContext");
        return false;
    }
    SkASSERT(semaphore.isInitialized());
    if (!this->exportSemaphore(reporter, semaphore)) {
        return false;
    }
    return true;
}

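// Verify that SYNC_FD external semaphores are exportable and importable on this device, then
// create a VkSemaphore with an export info chained in and hand it to Skia as a
// GrBackendSemaphore.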
bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
                                                  GrBackendSemaphore* beSemaphore) {
    // Query supported info
    VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
    exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
    exSemInfo.pNext = nullptr;
    exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkExternalSemaphoreProperties exSemProps;
    exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
    exSemProps.pNext = nullptr;

    fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
                                                    &exSemProps);

    if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.compatibleHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
        !SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
        return false;
    }

    VkExportSemaphoreCreateInfo exportInfo;
    exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
    exportInfo.pNext = nullptr;
    exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = &exportInfo;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
        return false;
    }
    beSemaphore->initVulkan(semaphore);
    return true;
}

bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
                                       const GrBackendSemaphore& beSemaphore) {
    VkSemaphore semaphore = beSemaphore.vkSemaphore();
    if (VK_NULL_HANDLE == semaphore) {
        ERRORF(reporter, "Invalid vulkan handle in export call");
        return false;
    }

    VkSemaphoreGetFdInfoKHR getFdInfo;
    getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
    getFdInfo.pNext = nullptr;
    getFdInfo.semaphore = semaphore;
    getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
        return false;
    }
    fSignalSemaphore = semaphore;
    return true;
}

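// Wrap an incoming sync FD in a freshly created VkSemaphore (temporary import) and attach it to
// the destination surface as a wait semaphore before any rendering samples the imported image.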
bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                                sk_sp<SkSurface> surface) {
    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = nullptr;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
        return false;
    }

    VkImportSemaphoreFdInfoKHR importInfo;
    importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
    importInfo.pNext = nullptr;
    importInfo.semaphore = semaphore;
    importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
    importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
    importInfo.fd = fdHandle;

    err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to import semaphore, err: %d", err);
        return false;
    }

    GrBackendSemaphore beSemaphore;
    beSemaphore.initVulkan(semaphore);
    if (!surface->wait(1, &beSemaphore)) {
        ERRORF(reporter, "Failed to add wait semaphore to surface");
        fVkDestroySemaphore(fDevice, semaphore, nullptr);
        return false;
    }
    return true;
}

sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                                AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext.get(),
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
        return nullptr;
    }

    return surface;
}

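// Reference pixel data: a deterministic per-pixel color pattern (alpha forced opaque) used both
// to fill the hardware buffer and to validate readbacks against it.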
static SkPMColor get_src_color(int x, int y) {
    SkASSERT(x >= 0 && x < DEV_W);
    SkASSERT(y >= 0 && y < DEV_H);

    U8CPU r = x;
    U8CPU g = y;
    U8CPU b = 0xc;

    U8CPU a = 0xff;
    switch ((x+y) % 5) {
        case 0:
            a = 0xff;
            break;
        case 1:
            a = 0x80;
            break;
        case 2:
            a = 0xCC;
            break;
        case 4:
            a = 0x01;
            break;
        case 3:
            a = 0x00;
            break;
    }
    a = 0xff;
    return SkPremultiplyARGBInline(a, r, g, b);
}

static SkBitmap make_src_bitmap() {
    static SkBitmap bmp;
    if (bmp.isNull()) {
        bmp.allocN32Pixels(DEV_W, DEV_H);
        intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
                        pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
                *pixel = get_src_color(x, y);
            }
        }
    }
    return bmp;
}

static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
                       const SkBitmap& dstBitmap) {
    bool result = true;
    for (int y = 0; y < DEV_H && result; ++y) {
        for (int x = 0; x < DEV_W && result; ++x) {
            const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
            const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
            if (srcPixel != dstPixel) {
                ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
                result = false;
            } /*else {
                ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
            }*/
        }
    }
    return result;
}

static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
                              AHardwareBuffer* buffer) {
    if (srcHelper) {
        srcHelper->cleanup();
    }
    if (dstHelper) {
        dstHelper->cleanup();
    }
    if (buffer) {
        AHardwareBuffer_release(buffer);
    }
}

enum class SrcType {
    kCPU,
    kEGL,
    kVulkan,
};

enum class DstType {
    kEGL,
    kVulkan,
};

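// The test body: fill an AHardwareBuffer from the source backend (or from the CPU), hand the
// buffer to the destination backend, draw the imported image into a new surface, and verify the
// readback against the reference bitmap. When shareSyncs is true, the handoff is ordered with an
// exported fence FD instead of a client-side wait.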
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
        srcHelper.reset(new EGLTestHelper(options));
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three are not used in the allocate call.
    hwbDesc.stride = 0;
    hwbDesc.rfu0 = 0;
    hwbDesc.rfu1 = 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocate hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get the actual desc for the allocated buffer so we know the stride for uploading cpu
        // data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        int bbp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bbp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);

    } else {
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImage::MakeFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it forces
        // synchronization to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
        } else {
            surface.reset();
            srcHelper->doClientSync();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into the destination backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);

    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    GrContext* grContext = dstHelper->grContext();

    // Make an SkSurface to render the wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurface::MakeRenderTarget(grContext,
                                                           SkBudgeted::kNo, imageInfo, 0,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstSurf.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    dstSurf.reset();
    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}

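// Each test below exercises one (source, destination) backend pairing; the _Syncs variants pass
// the fence FD across backends instead of doing a client-side sync between the two stages.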
DEF_GPUTEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_CPU_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}

#endif