/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work.

#include "SkTypes.h"

#if SK_SUPPORT_GPU && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)

#include "GrBackendSemaphore.h"
#include "GrContext.h"
#include "GrContextFactory.h"
#include "GrContextPriv.h"
#include "GrGpu.h"
#include "GrProxyProvider.h"
#include "SkAutoMalloc.h"
#include "SkCanvas.h"
#include "SkGr.h"
#include "SkImage.h"
#include "SkSurface.h"
#include "Test.h"
#include "../tools/gpu/vk/VkTestUtils.h"
#include "gl/GrGLDefines.h"
#include "gl/GrGLUtil.h"
#include "vk/GrVkBackendContext.h"
#include "vk/GrVkExtensions.h"

#include <android/hardware_buffer.h>
#include <cinttypes>

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

static const int DEV_W = 16, DEV_H = 16;

class BaseTestHelper {
public:
    virtual ~BaseTestHelper() {}

    virtual bool init(skiatest::Reporter* reporter) = 0;

    virtual void cleanup() = 0;
    virtual void releaseImage() = 0;

    virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                       AHardwareBuffer* buffer) = 0;
    virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) = 0;

    virtual void doClientSync() = 0;
    virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
    virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                          sk_sp<SkSurface>) = 0;

    virtual void makeCurrent() = 0;

    virtual GrContext* grContext() = 0;

    int getFdHandle() { return fFdHandle; }

protected:
    BaseTestHelper() {}

    int fFdHandle = 0;
};

class EGLTestHelper : public BaseTestHelper {
public:
    EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}

    ~EGLTestHelper() override {}

    void releaseImage() override {
        this->makeCurrent();
        if (!fGLCtx) {
            return;
        }
        if (EGL_NO_IMAGE_KHR != fImage) {
            fGLCtx->destroyEGLImage(fImage);
            fImage = EGL_NO_IMAGE_KHR;
        }
        if (fTexID) {
            GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
            fTexID = 0;
        }
    }

    void cleanup() override {
        this->releaseImage();
    }

    bool init(skiatest::Reporter* reporter) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;
    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void doClientSync() override;
    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    void makeCurrent() override { fGLCtx->makeCurrent(); }

    GrContext* grContext() override { return fGrContext; }

private:
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);

    typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
    typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
                                                 const EGLint*);
    typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
    EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
    EGLCreateImageKHRProc fEGLCreateImageKHR;
    EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;

    PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
    PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
    PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
    PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
    PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;

    EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
    GrGLuint fTexID = 0;

    sk_gpu_test::GrContextFactory fFactory;
    sk_gpu_test::ContextInfo fGLESContextInfo;

    sk_gpu_test::GLTestContext* fGLCtx = nullptr;
    GrContext* fGrContext = nullptr;
};

bool EGLTestHelper::init(skiatest::Reporter* reporter) {
    fGLESContextInfo = fFactory.getContextInfo(sk_gpu_test::GrContextFactory::kGLES_ContextType);
    fGrContext = fGLESContextInfo.grContext();
    fGLCtx = fGLESContextInfo.glContext();
    if (!fGrContext || !fGLCtx) {
        return false;
    }

    if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
        return false;
    }

    // Confirm we have EGL and the needed extensions.
    if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
        !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync")) {
        return false;
    }

    fEGLGetNativeClientBufferANDROID =
        (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
    if (!fEGLGetNativeClientBufferANDROID) {
        ERRORF(reporter, "Failed to get the proc eglGetNativeClientBufferANDROID");
        return false;
    }

    fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
    if (!fEGLCreateImageKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
        return false;
    }

    fEGLImageTargetTexture2DOES =
        (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
    if (!fEGLImageTargetTexture2DOES) {
        ERRORF(reporter, "Failed to get the proc glEGLImageTargetTexture2DOES");
        return false;
    }

    fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
    if (!fEGLCreateSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
        return false;
    }
    fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
    if (!fEGLWaitSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
        return false;
    }
    fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
    if (!fEGLGetSyncAttribKHR) {
        ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
        return false;
    }
    fEGLDupNativeFenceFDANDROID =
        (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
    if (!fEGLDupNativeFenceFDANDROID) {
        ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
        return false;
    }
    fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
    if (!fEGLDestroySyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
        return false;
    }

    return true;
}

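// Binds the AHardwareBuffer to a GL texture: the buffer is wrapped as an EGLClientBuffer,
// turned into an EGLImage via eglCreateImageKHR, and then attached to a freshly generated
// texture with glEGLImageTargetTexture2DOES.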
bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
    GrGLClearErr(fGLCtx->gl());

    EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
    EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                            EGL_NONE };
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
                                EGL_NATIVE_BUFFER_ANDROID,
                                eglClientBuffer, eglAttribs);
    if (EGL_NO_IMAGE_KHR == fImage) {
        SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError());
        return false;
    }

    GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
    if (!fTexID) {
        ERRORF(reporter, "Failed to create GL Texture");
        return false;
    }
    GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
    if (GR_GL_GET_ERROR(fGLCtx->gl()) != GR_GL_NO_ERROR) {
        ERRORF(reporter, "Failed to bind GL Texture");
        return false;
    }

    fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        ERRORF(reporter, "glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        return false;
    }

    fGrContext->resetContext(kTextureBinding_GrGLBackendState);
    return true;
}

sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkImage> image = SkImage::MakeFromTexture(fGrContext,
                                                    backendTex,
                                                    kTopLeft_GrSurfaceOrigin,
                                                    kRGBA_8888_SkColorType,
                                                    kPremul_SkAlphaType,
                                                    nullptr);

    if (!image) {
        ERRORF(reporter, "Failed to make wrapped GL SkImage");
        return nullptr;
    }

    return image;
}

sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext,
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface) {
        ERRORF(reporter, "Failed to make wrapped GL SkSurface");
        return nullptr;
    }

    return surface;
}

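// Flushes pending GL work for the surface and exports an Android native fence FD
// (via EGL_SYNC_NATIVE_FENCE_ANDROID) that the destination context can wait on.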
bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                   sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
        return false;
    }

    surface->flush();
    GR_GL_CALL(fGLCtx->gl(), Flush());
    fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);

    EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }

    return true;
}

bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                             sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLint attr[] = {
        EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
        EGL_NONE
    };
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter,
               "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
        return false;
    }
    EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed call to eglWaitSyncKHR, error: %d\n", result);
        // Don't return false yet; try to delete the sync first.
    }
    result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }
    return true;
}

void EGLTestHelper::doClientSync() {
    sk_gpu_test::FenceSync* fenceSync = fGLCtx->fenceSync();
    sk_gpu_test::PlatformFence fence = fenceSync->insertFence();
    fenceSync->waitFence(fence);
    fenceSync->deleteFence(fence);
}

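// Helpers for declaring and loading the Vulkan entry points the test needs. Instance-level
// procs are resolved against fBackendContext.fInstance, device-level procs against fDevice.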
#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

#define ACQUIRE_INST_VK_PROC(name)                                                            \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance, \
                                                       VK_NULL_HANDLE));                      \
    if (fVk##name == nullptr) {                                                               \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);             \
        return false;                                                                         \
    }

#define ACQUIRE_DEVICE_VK_PROC(name)                                                          \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
    if (fVk##name == nullptr) {                                                               \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);             \
        return false;                                                                         \
    }

class VulkanTestHelper : public BaseTestHelper {
public:
    VulkanTestHelper() {}

    ~VulkanTestHelper() override {}

    void releaseImage() override {
        if (VK_NULL_HANDLE == fDevice) {
            return;
        }
        if (fImage != VK_NULL_HANDLE) {
            fVkDestroyImage(fDevice, fImage, nullptr);
            fImage = VK_NULL_HANDLE;
        }

        if (fMemory != VK_NULL_HANDLE) {
            fVkFreeMemory(fDevice, fMemory, nullptr);
            fMemory = VK_NULL_HANDLE;
        }
    }

    void cleanup() override {
        fGrContext.reset();
        this->releaseImage();
        if (fSignalSemaphore != VK_NULL_HANDLE) {
            fVkDestroySemaphore(fDevice, fSignalSemaphore, nullptr);
            fSignalSemaphore = VK_NULL_HANDLE;
        }
        fBackendContext.fMemoryAllocator.reset();
        if (fDevice != VK_NULL_HANDLE) {
            fVkDeviceWaitIdle(fDevice);
            fVkDestroyDevice(fDevice, nullptr);
            fDevice = VK_NULL_HANDLE;
        }
#ifdef SK_ENABLE_VK_LAYERS
        if (fDebugCallback != VK_NULL_HANDLE) {
            fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
        }
#endif
        if (fBackendContext.fInstance != VK_NULL_HANDLE) {
            fVkDestroyInstance(fBackendContext.fInstance, nullptr);
            fBackendContext.fInstance = VK_NULL_HANDLE;
        }

        delete fExtensions;

        sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
        delete fFeatures;
    }

    bool init(skiatest::Reporter* reporter) override;

    void doClientSync() override {
        if (!fGrContext) {
            return;
        }

        fGrContext->priv().getGpu()->testingOnly_flushGpuAndSync();
    }

    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;

    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void makeCurrent() override {}

    GrContext* grContext() override { return fGrContext.get(); }

private:
    bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);

    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
                              GrVkImageInfo* outImageInfo);

    bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
    bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);

    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);

    DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);

    DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements2);
    DECLARE_VK_PROC(DestroyImage);

    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(BindImageMemory2);
    DECLARE_VK_PROC(FreeMemory);

    DECLARE_VK_PROC(CreateSemaphore);
    DECLARE_VK_PROC(GetSemaphoreFdKHR);
    DECLARE_VK_PROC(ImportSemaphoreFdKHR);
    DECLARE_VK_PROC(DestroySemaphore);

    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fMemory = VK_NULL_HANDLE;

    GrVkExtensions* fExtensions = nullptr;
    VkPhysicalDeviceFeatures2* fFeatures = nullptr;
    VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
    PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;

    // We hold on to the semaphore so we can delete it once the GPU is done.
    VkSemaphore fSignalSemaphore = VK_NULL_HANDLE;

    VkDevice fDevice = VK_NULL_HANDLE;

    GrVkBackendContext fBackendContext;
    sk_sp<GrContext> fGrContext;
};

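// Sets up a standalone Vulkan context for the test: loads the Vulkan library, creates a
// backend instance/device, verifies the external-memory AHardwareBuffer and
// external-semaphore-fd extensions are present, resolves the needed entry points, and then
// wraps the device in a GrContext.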
bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
    PFN_vkGetInstanceProcAddr instProc;
    PFN_vkGetDeviceProcAddr devProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
        return false;
    }
    auto getProc = [&instProc, &devProc](const char* proc_name,
                                         VkInstance instance, VkDevice device) {
        if (device != VK_NULL_HANDLE) {
            return devProc(device, proc_name);
        }
        return instProc(instance, proc_name);
    };

    fExtensions = new GrVkExtensions();
    fFeatures = new VkPhysicalDeviceFeatures2;
    memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
    fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    fFeatures->pNext = nullptr;

    fBackendContext.fInstance = VK_NULL_HANDLE;
    fBackendContext.fDevice = VK_NULL_HANDLE;

    if (!sk_gpu_test::CreateVkBackendContext(getProc, &fBackendContext, fExtensions,
                                             fFeatures, &fDebugCallback)) {
        return false;
    }
    fDevice = fBackendContext.fDevice;

    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
                fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
    }

    ACQUIRE_INST_VK_PROC(DestroyInstance);
    ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
    ACQUIRE_INST_VK_PROC(DestroyDevice);

    if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                                   2)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
        // return false;
    }

    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);

    ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    ACQUIRE_DEVICE_VK_PROC(CreateImage);
    ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
    ACQUIRE_DEVICE_VK_PROC(DestroyImage);

    ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
    ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
    ACQUIRE_DEVICE_VK_PROC(FreeMemory);

    ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
    ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);

    fGrContext = GrContext::MakeVulkan(fBackendContext);
    REPORTER_ASSERT(reporter, fGrContext.get());
    if (!fGrContext) {
        return false;
    }

    return this->checkOptimalHardwareBuffer(reporter);
}

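// Queries vkGetPhysicalDeviceImageFormatProperties2 with the AHardwareBuffer external handle
// type to confirm the device can sample, copy to/from, and dedicated-import an optimally
// tiled RGBA8 image of the test size.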
bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
    VkResult err;

    VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
    externalImageFormatInfo.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
    externalImageFormatInfo.pNext = nullptr;
    externalImageFormatInfo.handleType =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    //externalImageFormatInfo.handType = 0x80;

    // We will create the hardware buffer with GPU sampled usage, so these image usages should
    // all be valid.
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
    imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
    imageFormatInfo.pNext = &externalImageFormatInfo;
    imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageFormatInfo.type = VK_IMAGE_TYPE_2D;
    imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageFormatInfo.usage = usageFlags;
    imageFormatInfo.flags = 0;

    VkAndroidHardwareBufferUsageANDROID hwbUsage;
    hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
    hwbUsage.pNext = nullptr;

    VkExternalImageFormatProperties externalImgFormatProps;
    externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
    externalImgFormatProps.pNext = &hwbUsage;

    VkImageFormatProperties2 imgFormProps;
    imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
    imgFormProps.pNext = &externalImgFormatProps;

    err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
                                                     &imageFormatInfo, &imgFormProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperties2 failed, err: %d", err);
        return false;
    }

    const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
    REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
    REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);

    const VkExternalMemoryProperties& externalImageFormatProps =
            externalImgFormatProps.externalMemoryProperties;
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));
    REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
                                       externalImageFormatProps.externalMemoryFeatures));

    REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
                                       hwbUsage.androidHardwareBufferUsage));

    return true;
}

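// Imports the AHardwareBuffer into Vulkan: query its properties with
// vkGetAndroidHardwareBufferPropertiesANDROID, create a VkImage that lists the Android
// hardware buffer external-memory handle type, pick a device-local memory type allowed by
// the buffer, make a dedicated allocation that imports the buffer, and bind it to the image.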
bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
                                            AHardwareBuffer* buffer,
                                            bool forWrite,
                                            GrVkImageInfo* outImageInfo) {
    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "GetAndroidHardwareBufferPropertiesANDROID failed, err: %d", err);
        return false;
    }

    REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
    REPORTER_ASSERT(reporter,
            SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
            SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
    if (forWrite) {
        REPORTER_ASSERT(reporter,
                SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));
    }

    bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
    const VkExternalFormatANDROID externalFormatInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,              // sType
        nullptr,                                                // pNext
        useExternalFormat ? hwbFormatProps.externalFormat : 0,  // externalFormat
    };

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        &externalFormatInfo,                                                 // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    if (forWrite) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,  // sType
        &externalMemoryImageInfo,             // pNext
        0,                                    // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                     // VkImageType
        hwbFormatProps.format,                // VkFormat
        { DEV_W, DEV_H, 1 },                  // VkExtent3D
        1,                                    // mipLevels
        1,                                    // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                // samples
        VK_IMAGE_TILING_OPTIMAL,              // VkImageTiling
        usageFlags,                           // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode
        0,                                    // queueFamilyCount
        0,                                    // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,            // initialLayout
    };

    err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Create Image failed, err: %d", err);
        return false;
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        ERRORF(reporter, "Failed to find valid heap for imported memory");
        return false;
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = buffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = fImage;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
        &dedicatedAllocInfo,                     // pNext
        hwbProps.allocationSize,                 // allocationSize
        typeIndex,                               // memoryTypeIndex
    };

    err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
        return false;
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = fImage;
    bindImageInfo.memory = fMemory;
    bindImageInfo.memoryOffset = 0;

    err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
        return false;
    }

    outImageInfo->fImage = fImage;
    outImageInfo->fAlloc = GrVkAlloc(fMemory, 0, hwbProps.allocationSize, 0);
    outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
    outImageInfo->fLevelCount = 1;
    outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    return true;
}

sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(fGrContext.get(),
                                                           backendTex,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           kRGBA_8888_SkColorType,
                                                           kPremul_SkAlphaType,
                                                           nullptr);

    if (!wrappedImage.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
        return nullptr;
    }

    return wrappedImage;
}

bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                      sk_sp<SkSurface> surface) {
    surface->flush();
    surface.reset();
    GrBackendSemaphore semaphore;
    if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
        return false;
    }
    GrSemaphoresSubmitted submitted = fGrContext->flush(kNone_GrFlushFlags, 1, &semaphore);
    if (GrSemaphoresSubmitted::kNo == submitted) {
        ERRORF(reporter, "Failed call to flush on GrContext");
        return false;
    }
    SkASSERT(semaphore.isInitialized());
    if (!this->exportSemaphore(reporter, semaphore)) {
        return false;
    }
    return true;
}

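// Creates a VkSemaphore that can be exported as a sync FD. It first verifies via
// vkGetPhysicalDeviceExternalSemaphoreProperties that SYNC_FD handles are exportable and
// importable, then chains a VkExportSemaphoreCreateInfo into the semaphore create info.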
bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
                                                  GrBackendSemaphore* beSemaphore) {
    // Query supported info
    VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
    exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
    exSemInfo.pNext = nullptr;
    exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkExternalSemaphoreProperties exSemProps;
    exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
    exSemProps.pNext = nullptr;

    fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
                                                    &exSemProps);

    if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.compatibleHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
        !SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
        return false;
    }

    VkExportSemaphoreCreateInfo exportInfo;
    exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
    exportInfo.pNext = nullptr;
    exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = &exportInfo;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
        return false;
    }
    beSemaphore->initVulkan(semaphore);
    return true;
}

bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
                                       const GrBackendSemaphore& beSemaphore) {
    VkSemaphore semaphore = beSemaphore.vkSemaphore();
    if (VK_NULL_HANDLE == semaphore) {
        ERRORF(reporter, "Invalid vulkan handle in export call");
        return false;
    }

    VkSemaphoreGetFdInfoKHR getFdInfo;
    getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
    getFdInfo.pNext = nullptr;
    getFdInfo.semaphore = semaphore;
    getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
        return false;
    }
    fSignalSemaphore = semaphore;
    return true;
}

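// Wraps the source context's fence FD in a new VkSemaphore using a temporary
// (VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) sync FD import and hands it to SkSurface::wait() so
// the destination GPU work waits on the producer.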
bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                                sk_sp<SkSurface> surface) {
    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = nullptr;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
        return false;
    }

    VkImportSemaphoreFdInfoKHR importInfo;
    importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
    importInfo.pNext = nullptr;
    importInfo.semaphore = semaphore;
    importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
    importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
    importInfo.fd = fdHandle;

    err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to import semaphore, err: %d", err);
        return false;
    }

    GrBackendSemaphore beSemaphore;
    beSemaphore.initVulkan(semaphore);
    if (!surface->wait(1, &beSemaphore)) {
        ERRORF(reporter, "Failed to add wait semaphore to surface");
        fVkDestroySemaphore(fDevice, semaphore, nullptr);
        return false;
    }
    return true;
}

sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                                AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext.get(),
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
        return nullptr;
    }

    return surface;
}

static SkPMColor get_src_color(int x, int y) {
    SkASSERT(x >= 0 && x < DEV_W);
    SkASSERT(y >= 0 && y < DEV_H);

    U8CPU r = x;
    U8CPU g = y;
    U8CPU b = 0xc;

    U8CPU a = 0xff;
    switch ((x+y) % 5) {
        case 0:
            a = 0xff;
            break;
        case 1:
            a = 0x80;
            break;
        case 2:
            a = 0xCC;
            break;
        case 4:
            a = 0x01;
            break;
        case 3:
            a = 0x00;
            break;
    }
    a = 0xff;  // Note: alpha is currently forced to fully opaque, overriding the switch above.
    return SkPremultiplyARGBInline(a, r, g, b);
}

static SkBitmap make_src_bitmap() {
    static SkBitmap bmp;
    if (bmp.isNull()) {
        bmp.allocN32Pixels(DEV_W, DEV_H);
        intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
                        pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
                *pixel = get_src_color(x, y);
            }
        }
    }
    return bmp;
}

static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
                       const SkBitmap& dstBitmap) {
    bool result = true;
    for (int y = 0; y < DEV_H && result; ++y) {
        for (int x = 0; x < DEV_W && result; ++x) {
            const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
            const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
            if (srcPixel != dstPixel) {
                ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
                result = false;
            } /*else {
                ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
            }*/
        }
    }
    return result;
}

static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
                              AHardwareBuffer* buffer) {
    if (srcHelper) {
        srcHelper->cleanup();
    }
    if (dstHelper) {
        dstHelper->cleanup();
    }
    if (buffer) {
        AHardwareBuffer_release(buffer);
    }
}

enum class SrcType {
    kCPU,
    kEGL,
    kVulkan,
};

enum class DstType {
    kEGL,
    kVulkan,
};

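// Core test driver: the source backend (CPU, EGL, or Vulkan) writes the reference pattern
// into an AHardwareBuffer, then the destination backend imports that buffer, draws it into a
// new surface, and reads the pixels back to compare against the source bitmap. When
// shareSyncs is true, synchronization is done with an exported/imported native fence FD
// instead of a blocking client-side sync.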
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
        srcHelper.reset(new EGLTestHelper(options));
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three fields are not used by the allocate call.
    hwbDesc.stride = 0;
    hwbDesc.rfu0 = 0;
    hwbDesc.rfu1 = 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocate hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get the actual desc for the allocated buffer so we know the stride for uploading
        // cpu data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        int bpp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bpp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);

    } else {
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImage::MakeFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it forces
        // synchronization to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
        } else {
            surface.reset();
            srcHelper->doClientSync();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into the destination backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);

    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    GrContext* grContext = dstHelper->grContext();

    // Make an SkSurface to render the wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurface::MakeRenderTarget(grContext,
                                                           SkBudgeted::kNo, imageInfo, 0,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstSurf.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    dstSurf.reset();
    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}

DEF_GPUTEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_CPU_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}

#endif