1/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8// This is a GPU-backend specific test. It relies on static initializers to work.
9
10#include "SkTypes.h"
11
12#if SK_SUPPORT_GPU && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)
13
14#include "GrBackendSemaphore.h"
15#include "GrContext.h"
16#include "GrContextFactory.h"
17#include "GrContextPriv.h"
18#include "GrGpu.h"
19#include "GrProxyProvider.h"
20#include "SkAutoMalloc.h"
21#include "SkCanvas.h"
22#include "SkGr.h"
23#include "SkImage.h"
24#include "SkSurface.h"
25#include "Test.h"
26#include "../tools/gpu/vk/VkTestUtils.h"
27#include "gl/GrGLDefines.h"
28#include "gl/GrGLUtil.h"
29#include "vk/GrVkBackendContext.h"
30#include "vk/GrVkExtensions.h"
31
32#include <android/hardware_buffer.h>
33#include <cinttypes>
34
35#include <EGL/egl.h>
36#include <EGL/eglext.h>
37#include <GLES/gl.h>
38#include <GLES/glext.h>
39
40static const int DEV_W = 16, DEV_H = 16;
41
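// BaseTestHelper abstracts the backend-specific pieces of this test so the same
// run_test() flow can drive both the GL/EGL and Vulkan paths: importing an
// AHardwareBuffer for read or write, flushing work, and sharing fence/semaphore fds.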
42class BaseTestHelper {
43public:
44 virtual ~BaseTestHelper() {}
45
46 virtual bool init(skiatest::Reporter* reporter) = 0;
47
48 virtual void cleanup() = 0;
49 virtual void releaseImage() = 0;
50
51 virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
52 AHardwareBuffer* buffer) = 0;
53 virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
54 AHardwareBuffer* buffer) = 0;
55
56 virtual void doClientSync() = 0;
57 virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
58 virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
59 sk_sp<SkSurface>) = 0;
60
61 virtual void makeCurrent() = 0;
62
63 virtual GrContext* grContext() = 0;
64
65 int getFdHandle() { return fFdHandle; }
66
67protected:
68 BaseTestHelper() {}
69
70 int fFdHandle = 0;
71};
72
73class EGLTestHelper : public BaseTestHelper {
74public:
75 EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}
76
77 ~EGLTestHelper() override {}
78
79 void releaseImage() override {
80 this->makeCurrent();
81 if (!fGLCtx) {
82 return;
83 }
84 if (EGL_NO_IMAGE_KHR != fImage) {
85 fGLCtx->destroyEGLImage(fImage);
86 fImage = EGL_NO_IMAGE_KHR;
87 }
88 if (fTexID) {
89 GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
90 fTexID = 0;
91 }
92 }
93
94 void cleanup() override {
95 this->releaseImage();
96 }
97
98 bool init(skiatest::Reporter* reporter) override;
99
100 sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
101 AHardwareBuffer* buffer) override;
102 sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
103 AHardwareBuffer* buffer) override;
104
105 void doClientSync() override;
106 bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
107 bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
108 sk_sp<SkSurface>) override;
109
110 void makeCurrent() override { fGLCtx->makeCurrent(); }
111
112 GrContext* grContext() override { return fGrContext; }
113
114private:
115 bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);
116
117 typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
118 typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
119 const EGLint*);
120 typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
121 EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
122 EGLCreateImageKHRProc fEGLCreateImageKHR;
123 EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;
124
125 PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
126 PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
127 PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
128 PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
129 PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;
130
131 EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
132 GrGLuint fTexID = 0;
133
134 sk_gpu_test::GrContextFactory fFactory;
135 sk_gpu_test::ContextInfo fGLESContextInfo;
136
137 sk_gpu_test::GLTestContext* fGLCtx = nullptr;
138 GrContext* fGrContext = nullptr;
139};
140
141bool EGLTestHelper::init(skiatest::Reporter* reporter) {
142 fGLESContextInfo = fFactory.getContextInfo(sk_gpu_test::GrContextFactory::kGLES_ContextType);
143 fGrContext = fGLESContextInfo.grContext();
144 fGLCtx = fGLESContextInfo.glContext();
145 if (!fGrContext || !fGLCtx) {
146 return false;
147 }
148
149 if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
150 return false;
151 }
152
153 // Confirm we have EGL and the needed extensions
154 if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
155 !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
156 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
157 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
158 !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync")) {
159 return false;
160 }
161
162 fEGLGetNativeClientBufferANDROID =
163 (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
164 if (!fEGLGetNativeClientBufferANDROID) {
165 ERRORF(reporter, "Failed to get the eglGetNativeClientBufferAndroid proc");
166 return false;
167 }
168
169 fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
170 if (!fEGLCreateImageKHR) {
171 ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
172 return false;
173 }
174
175 fEGLImageTargetTexture2DOES =
176 (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
177 if (!fEGLImageTargetTexture2DOES) {
178 ERRORF(reporter, "Failed to get the proc EGLImageTargetTexture2DOES");
179 return false;
180 }
181
182 fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
183 if (!fEGLCreateSyncKHR) {
184 ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
185 return false;
186
187 }
188 fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
189 if (!fEGLWaitSyncKHR) {
190 ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
191 return false;
192
193 }
194 fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
195 if (!fEGLGetSyncAttribKHR) {
196 ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
197 return false;
198
199 }
200 fEGLDupNativeFenceFDANDROID =
201 (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
202 if (!fEGLDupNativeFenceFDANDROID) {
203 ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
204 return false;
205
206 }
207 fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
208 if (!fEGLDestroySyncKHR) {
209 ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
210 return false;
211
212 }
213
214 return true;
215}
216
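// Imports the AHardwareBuffer on the EGL/GL side: wrap it as an EGLClientBuffer, create an
// EGLImage from it, bind that image to a newly created GL texture via
// glEGLImageTargetTexture2DOES, then reset Skia's cached texture-binding state.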
217bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
218 GrGLClearErr(fGLCtx->gl());
219
220 EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
221 EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
222 EGL_NONE };
223 EGLDisplay eglDisplay = eglGetCurrentDisplay();
224 fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
225 EGL_NATIVE_BUFFER_ANDROID,
226 eglClientBuffer, eglAttribs);
227 if (EGL_NO_IMAGE_KHR == fImage) {
228 SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError() );
229 return false;
230 }
231
232 GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
233 if (!fTexID) {
234 ERRORF(reporter, "Failed to create GL Texture");
235 return false;
236 }
237 GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
238 if (GR_GL_GET_ERROR(fGLCtx->gl()) != GR_GL_NO_ERROR) {
239 ERRORF(reporter, "Failed to bind GL Texture");
240 return false;
241 }
242
243 fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
244 GLenum status = GL_NO_ERROR;
245 if ((status = glGetError()) != GL_NO_ERROR) {
246 ERRORF(reporter, "EGLImageTargetTexture2DOES failed (%#x)", (int) status);
247 return false;
248 }
249
250 fGrContext->resetContext(kTextureBinding_GrGLBackendState);
251 return true;
252}
253
254sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
255 AHardwareBuffer* buffer) {
256 if (!this->importHardwareBuffer(reporter, buffer)) {
257 return nullptr;
258 }
259 GrGLTextureInfo textureInfo;
260 textureInfo.fTarget = GR_GL_TEXTURE_2D;
261 textureInfo.fID = fTexID;
262 textureInfo.fFormat = GR_GL_RGBA8;
263
264 GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
265 REPORTER_ASSERT(reporter, backendTex.isValid());
266
267 sk_sp<SkImage> image = SkImage::MakeFromTexture(fGrContext,
268 backendTex,
269 kTopLeft_GrSurfaceOrigin,
270 kRGBA_8888_SkColorType,
271 kPremul_SkAlphaType,
272 nullptr);
273
274 if (!image) {
275 ERRORF(reporter, "Failed to make wrapped GL SkImage");
276 return nullptr;
277 }
278
279 return image;
280}
281
282sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
283 AHardwareBuffer* buffer) {
284 if (!this->importHardwareBuffer(reporter, buffer)) {
285 return nullptr;
286 }
287 GrGLTextureInfo textureInfo;
288 textureInfo.fTarget = GR_GL_TEXTURE_2D;
289 textureInfo.fID = fTexID;
290 textureInfo.fFormat = GR_GL_RGBA8;
291
292 GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
293 REPORTER_ASSERT(reporter, backendTex.isValid());
294
295 sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext,
296 backendTex,
297 kTopLeft_GrSurfaceOrigin,
298 0,
299 kRGBA_8888_SkColorType,
300 nullptr, nullptr);
301
302 if (!surface) {
303 ERRORF(reporter, "Failed to make wrapped GL SkSurface");
304 return nullptr;
305 }
306
307 return surface;
308}
309
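// Flushes the surface's pending work and exports a native fence fd for it: create an
// EGL_SYNC_NATIVE_FENCE_ANDROID sync, flush Skia and GL, then dup the sync's fd into
// fFdHandle so the consuming backend can wait on it.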
310bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
311 sk_sp<SkSurface> surface) {
312 EGLDisplay eglDisplay = eglGetCurrentDisplay();
313 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
314 if (EGL_NO_SYNC_KHR == eglsync) {
315 ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
316 return false;
317 }
318
319 surface->flush();
320 GR_GL_CALL(fGLCtx->gl(), Flush());
321 fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);
322
323 EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
324 if (EGL_TRUE != result) {
325 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
326 return false;
327 }
328
329 return true;
330}
331
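// Imports a native fence fd into an EGL sync object and issues a server-side wait
// (eglWaitSyncKHR) so subsequent GL commands on this context wait for the fence.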
332bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
333 sk_sp<SkSurface> surface) {
334 EGLDisplay eglDisplay = eglGetCurrentDisplay();
335 EGLint attr[] = {
336 EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
337 EGL_NONE
338 };
339 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
340 if (EGL_NO_SYNC_KHR == eglsync) {
341 ERRORF(reporter,
342 "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
343 return false;
344 }
345 EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
346 if (EGL_TRUE != result) {
347 ERRORF(reporter, "Failed called to eglWaitSyncKHR, error: %d\n", result);
348 // Don't return false yet, try to delete the sync first
349 }
350 result = fEGLDestroySyncKHR(eglDisplay, eglsync);
351 if (EGL_TRUE != result) {
352 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
353 return false;
354 }
355 return true;
356}
357
358void EGLTestHelper::doClientSync() {
359 sk_gpu_test::FenceSync* fenceSync = fGLCtx->fenceSync();
360 sk_gpu_test::PlatformFence fence = fenceSync->insertFence();
361 fenceSync->waitFence(fence);
362 fenceSync->deleteFence(fence);
363}
364
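// The Vulkan helper drives Vulkan directly rather than going through Skia, so it declares and
// acquires its own instance- and device-level function pointers; the ACQUIRE macros report an
// error and fail init() if a proc cannot be resolved.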
365#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name
366
367#define ACQUIRE_INST_VK_PROC(name) \
368 fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance,\
369 VK_NULL_HANDLE)); \
370 if (fVk##name == nullptr) { \
371 ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
372 return false; \
373 }
374
375#define ACQUIRE_DEVICE_VK_PROC(name) \
376 fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
377 if (fVk##name == nullptr) { \
378 ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
379 return false; \
380 }
381
382class VulkanTestHelper : public BaseTestHelper {
383public:
384 VulkanTestHelper() {}
385
386 ~VulkanTestHelper() override {}
387
388 void releaseImage() override {
389 if (VK_NULL_HANDLE == fDevice) {
390 return;
391 }
392 if (fImage != VK_NULL_HANDLE) {
393 fVkDestroyImage(fDevice, fImage, nullptr);
394 fImage = VK_NULL_HANDLE;
395 }
396
397 if (fMemory != VK_NULL_HANDLE) {
398 fVkFreeMemory(fDevice, fMemory, nullptr);
399 fMemory = VK_NULL_HANDLE;
400 }
401 }
402 void cleanup() override {
403 this->releaseImage();
404
405 fGrContext.reset();
406 fBackendContext.fMemoryAllocator.reset();
407 if (fDevice != VK_NULL_HANDLE) {
408 fVkDeviceWaitIdle(fDevice);
409 fVkDestroyDevice(fDevice, nullptr);
410 fDevice = VK_NULL_HANDLE;
411 }
412#ifdef SK_ENABLE_VK_LAYERS
413 if (fDebugCallback != VK_NULL_HANDLE) {
414 fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
415 }
416#endif
417 if (fBackendContext.fInstance != VK_NULL_HANDLE) {
418 fVkDestroyInstance(fBackendContext.fInstance, nullptr);
419 fBackendContext.fInstance = VK_NULL_HANDLE;
420 }
421
422 delete fExtensions;
423
424 sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
425 delete fFeatures;
426 }
427
428 bool init(skiatest::Reporter* reporter) override;
429
430 void doClientSync() override {
431 if (!fGrContext) {
432 return;
433 }
434
435 fGrContext->contextPriv().getGpu()->testingOnly_flushGpuAndSync();
436 }
437
438 bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
439 bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
440 sk_sp<SkSurface>) override;
441
442 sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
443 AHardwareBuffer* buffer) override;
444
445 sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
446 AHardwareBuffer* buffer) override;
447
448 void makeCurrent() override {}
449
450 GrContext* grContext() override { return fGrContext.get(); }
451
452private:
453 bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);
454
455 bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
456 GrVkImageInfo* outImageInfo);
457
458 bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
459 bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);
460
461 DECLARE_VK_PROC(DestroyInstance);
462 DECLARE_VK_PROC(DeviceWaitIdle);
463 DECLARE_VK_PROC(DestroyDevice);
464
465 DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
466 DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
467 DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);
468
469 DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);
470
471 DECLARE_VK_PROC(CreateImage);
472 DECLARE_VK_PROC(GetImageMemoryRequirements2);
473 DECLARE_VK_PROC(DestroyImage);
474
475 DECLARE_VK_PROC(AllocateMemory);
476 DECLARE_VK_PROC(BindImageMemory2);
477 DECLARE_VK_PROC(FreeMemory);
478
479 DECLARE_VK_PROC(CreateSemaphore);
480 DECLARE_VK_PROC(GetSemaphoreFdKHR);
481 DECLARE_VK_PROC(ImportSemaphoreFdKHR);
482 DECLARE_VK_PROC(DestroySemaphore);
483
484 VkImage fImage = VK_NULL_HANDLE;
485 VkDeviceMemory fMemory = VK_NULL_HANDLE;
486
487 GrVkExtensions* fExtensions = nullptr;
488 VkPhysicalDeviceFeatures2* fFeatures = nullptr;
489 VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
490 PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;
491
492 VkDevice fDevice = VK_NULL_HANDLE;
493
494 GrVkBackendContext fBackendContext;
495 sk_sp<GrContext> fGrContext;
496};
497
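// Sets up a standalone Vulkan context for the test: load the Vulkan library, create a backend
// context, require the AHardwareBuffer external-memory, sampler YCbCr-conversion, and
// external-semaphore-fd extensions, acquire the proc pointers used below, and wrap the device
// in a GrContext.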
498bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
499 PFN_vkGetInstanceProcAddr instProc;
500 PFN_vkGetDeviceProcAddr devProc;
501 if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
502 return false;
503 }
504 auto getProc = [&instProc, &devProc](const char* proc_name,
505 VkInstance instance, VkDevice device) {
506 if (device != VK_NULL_HANDLE) {
507 return devProc(device, proc_name);
508 }
509 return instProc(instance, proc_name);
510 };
511
512 fExtensions = new GrVkExtensions();
513 fFeatures = new VkPhysicalDeviceFeatures2;
514 memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
515 fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
516 fFeatures->pNext = nullptr;
517
518 fBackendContext.fInstance = VK_NULL_HANDLE;
519 fBackendContext.fDevice = VK_NULL_HANDLE;
520
521 if (!sk_gpu_test::CreateVkBackendContext(getProc, &fBackendContext, fExtensions,
522 fFeatures, &fDebugCallback)) {
523 return false;
524 }
525 fDevice = fBackendContext.fDevice;
526
527 if (fDebugCallback != VK_NULL_HANDLE) {
528 fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
529 fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
530 }
531
532 ACQUIRE_INST_VK_PROC(DestroyInstance);
533 ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
534 ACQUIRE_INST_VK_PROC(DestroyDevice);
535
536 if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
537 2)) {
538 return false;
539 }
540 if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
541 return false;
542 }
543 if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
544 return false;
545 }
546 if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
547 // return false;
548 }
549
550 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
551 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
552 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
553
554 ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);
555
556 ACQUIRE_DEVICE_VK_PROC(CreateImage);
557 ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
558 ACQUIRE_DEVICE_VK_PROC(DestroyImage);
559
560 ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
561 ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
562 ACQUIRE_DEVICE_VK_PROC(FreeMemory);
563
564 ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
565 ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
566 ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
567 ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);
568
569 fGrContext = GrContext::MakeVulkan(fBackendContext);
570 REPORTER_ASSERT(reporter, fGrContext.get());
571 if (!fGrContext) {
572 return false;
573 }
574
575 return this->checkOptimalHardwareBuffer(reporter);
576}
577
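// Queries vkGetPhysicalDeviceImageFormatProperties2 to confirm the device can import an
// R8G8B8A8_UNORM, optimal-tiling AHardwareBuffer-backed image with sampled/transfer usage,
// and that such imports are importable and dedicated-allocation only.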
578bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
579 VkResult err;
580
581 VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
582 externalImageFormatInfo.sType =
583 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
584 externalImageFormatInfo.pNext = nullptr;
585 externalImageFormatInfo.handleType =
586 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
587 //externalImageFormatInfo.handleType = 0x80;
588
589 // We will create the hardware buffer with GPU-sampled usage, so these image usages should all be valid
590 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
591 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
592 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
593 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
594 imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
595 imageFormatInfo.pNext = &externalImageFormatInfo;
596 imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
597 imageFormatInfo.type = VK_IMAGE_TYPE_2D;
598 imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
599 imageFormatInfo.usage = usageFlags;
600 imageFormatInfo.flags = 0;
601
602 VkAndroidHardwareBufferUsageANDROID hwbUsage;
603 hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
604 hwbUsage.pNext = nullptr;
605
606 VkExternalImageFormatProperties externalImgFormatProps;
607 externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
608 externalImgFormatProps.pNext = &hwbUsage;
609
610 VkImageFormatProperties2 imgFormProps;
611 imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
612 imgFormProps.pNext = &externalImgFormatProps;
613
614 err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
615 &imageFormatInfo, &imgFormProps);
616 if (VK_SUCCESS != err) {
617 ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperites failed, err: %d", err);
618 return false;
619 }
620
621 const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
622 REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
623 REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);
624
625 const VkExternalMemoryProperties& externalImageFormatProps =
626 externalImgFormatProps.externalMemoryProperties;
627 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
628 externalImageFormatProps.externalMemoryFeatures));
629 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
630 externalImageFormatProps.externalMemoryFeatures));
631
632 REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
633 hwbUsage.androidHardwareBufferUsage));
634
635 return true;
636}
637
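// Imports the AHardwareBuffer into Vulkan: query its format and memory properties, create a
// VkImage with external-memory create info, pick a device-local memory type allowed by the
// buffer, import the buffer as a dedicated allocation, bind it to the image, and fill out a
// GrVkImageInfo that Skia can wrap.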
638bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
639 AHardwareBuffer* buffer,
640 bool forWrite,
641 GrVkImageInfo* outImageInfo) {
642 VkResult err;
643
644 VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
645 hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
646 hwbFormatProps.pNext = nullptr;
647
648 VkAndroidHardwareBufferPropertiesANDROID hwbProps;
649 hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
650 hwbProps.pNext = &hwbFormatProps;
651
652 err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
653 if (VK_SUCCESS != err) {
654 ERRORF(reporter, "vkGetAndroidHardwareBufferPropertiesANDROID failed, err: %d", err);
655 return false;
656 }
657
658 REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
659 REPORTER_ASSERT(reporter,
660 SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
661 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
662 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
663 if (forWrite) {
664 REPORTER_ASSERT(reporter,
665 SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));
666
667 }
668
669 bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
670 const VkExternalFormatANDROID externalFormatInfo {
671 VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, // sType
672 nullptr, // pNext
673 useExternalFormat ? hwbFormatProps.externalFormat : 0, // externalFormat
674 };
675
676 const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
677 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType
678 &externalFormatInfo, // pNext
679 //nullptr, // pNext
680 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
681 //0x80, // handleTypes
682 };
683
684 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
685 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
686 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
687 if (forWrite) {
688 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
689 }
690
691 const VkImageCreateInfo imageCreateInfo = {
692 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
693 &externalMemoryImageInfo, // pNext
694 0, // VkImageCreateFlags
695 VK_IMAGE_TYPE_2D, // VkImageType
696 hwbFormatProps.format, // VkFormat
697 { DEV_W, DEV_H, 1 }, // VkExtent3D
698 1, // mipLevels
699 1, // arrayLayers
700 VK_SAMPLE_COUNT_1_BIT, // samples
701 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling
702 usageFlags, // VkImageUsageFlags
703 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode
704 0, // queueFamilyCount
705 0, // pQueueFamilyIndices
706 VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout
707 };
708
709 err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
710 if (VK_SUCCESS != err) {
711 ERRORF(reporter, "Create Image failed, err: %d", err);
712 return false;
713 }
714
715 VkImageMemoryRequirementsInfo2 memReqsInfo;
716 memReqsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
717 memReqsInfo.pNext = nullptr;
718 memReqsInfo.image = fImage;
719
720 VkMemoryDedicatedRequirements dedicatedMemReqs;
721 dedicatedMemReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
722 dedicatedMemReqs.pNext = nullptr;
723
724 VkMemoryRequirements2 memReqs;
725 memReqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
726 memReqs.pNext = &dedicatedMemReqs;
727
728 fVkGetImageMemoryRequirements2(fDevice, &memReqsInfo, &memReqs);
729 REPORTER_ASSERT(reporter, VK_TRUE == dedicatedMemReqs.requiresDedicatedAllocation);
730
731 VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
732 phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
733 phyDevMemProps.pNext = nullptr;
734
735 uint32_t typeIndex = 0;
736 uint32_t heapIndex = 0;
737 bool foundHeap = false;
738 fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
739 uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
740 for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
741 if (hwbProps.memoryTypeBits & (1 << i)) {
742 const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
743 uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
744 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
745 if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
746 typeIndex = i;
747 heapIndex = pdmp.memoryTypes[i].heapIndex;
748 foundHeap = true;
749 }
750 }
751 }
752 if (!foundHeap) {
753 ERRORF(reporter, "Failed to find valid heap for imported memory");
754 return false;
755 }
756
757 VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
758 hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
759 hwbImportInfo.pNext = nullptr;
760 hwbImportInfo.buffer = buffer;
761
762 VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
763 dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
764 dedicatedAllocInfo.pNext = &hwbImportInfo;
765 dedicatedAllocInfo.image = fImage;
766 dedicatedAllocInfo.buffer = VK_NULL_HANDLE;
767
768 VkMemoryAllocateInfo allocInfo = {
769 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
770 &dedicatedAllocInfo, // pNext
771 hwbProps.allocationSize, // allocationSize
772 typeIndex, // memoryTypeIndex
773 };
774
775 err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
776 if (VK_SUCCESS != err) {
777 ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
778 return false;
779 }
780
781 VkBindImageMemoryInfo bindImageInfo;
782 bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
783 bindImageInfo.pNext = nullptr;
784 bindImageInfo.image = fImage;
785 bindImageInfo.memory = fMemory;
786 bindImageInfo.memoryOffset = 0;
787
788 err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
789 if (VK_SUCCESS != err) {
790 ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
791 return false;
792 }
793
794 outImageInfo->fImage = fImage;
795 outImageInfo->fAlloc = GrVkAlloc(fMemory, 0, hwbProps.allocationSize, 0);
796 outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
797 outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
798 outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
799 outImageInfo->fLevelCount = 1;
800 outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
801 return true;
802}
803
804sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
805 AHardwareBuffer* buffer) {
806 GrVkImageInfo imageInfo;
807 if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
808 return nullptr;
809 }
810
811 GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);
812
813 sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(fGrContext.get(),
814 backendTex,
815 kTopLeft_GrSurfaceOrigin,
816 kRGBA_8888_SkColorType,
817 kPremul_SkAlphaType,
818 nullptr);
819
820 if (!wrappedImage.get()) {
821 ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
822 return nullptr;
823 }
824
825 return wrappedImage;
826}
827
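// Flushes the surface, creates an exportable semaphore, asks the GrContext to signal it as
// part of a flush, and then exports its SYNC_FD for the destination backend to wait on.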
828bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
829 sk_sp<SkSurface> surface) {
830 surface->flush();
831 surface.reset();
832 GrBackendSemaphore semaphore;
833 if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
834 return false;
835 }
836 GrSemaphoresSubmitted submitted = fGrContext->flushAndSignalSemaphores(1, &semaphore);
837 if (GrSemaphoresSubmitted::kNo == submitted) {
838 ERRORF(reporter, "Failing call to flushAndSignalSemaphores on SkSurface");
839 return false;
840 }
841 SkASSERT(semaphore.isInitialized());
842 if (!this->exportSemaphore(reporter, semaphore)) {
843 return false;
844 }
845 return true;
846}
847
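// Creates a VkSemaphore that can be exported as a SYNC_FD handle, after verifying the physical
// device reports SYNC_FD as an exportable and importable semaphore handle type.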
848bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
849 GrBackendSemaphore* beSemaphore) {
850 // Query supported info
851 VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
852 exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
853 exSemInfo.pNext = nullptr;
854 exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
855
856 VkExternalSemaphoreProperties exSemProps;
857 exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
858 exSemProps.pNext = nullptr;
859
860 fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
861 &exSemProps);
862
863 if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
864 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
865 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
866 return false;
867 }
868 if (!SkToBool(exSemProps.compatibleHandleTypes &
869 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
870 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
871 return false;
872 }
873 if (!SkToBool(exSemProps.externalSemaphoreFeatures &
874 VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
875 !SkToBool(exSemProps.externalSemaphoreFeatures &
876 VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
877 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
878 return false;
879 }
880
881 VkExportSemaphoreCreateInfo exportInfo;
882 exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
883 exportInfo.pNext = nullptr;
884 exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
885
886 VkSemaphoreCreateInfo semaphoreInfo;
887 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
888 semaphoreInfo.pNext = &exportInfo;
889 semaphoreInfo.flags = 0;
890
891 VkSemaphore semaphore;
892 VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
893 if (VK_SUCCESS != err) {
894 ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
895 return false;
896 }
897 beSemaphore->initVulkan(semaphore);
898 return true;
899}
900
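// Exports the semaphore's SYNC_FD into fFdHandle via vkGetSemaphoreFdKHR, then destroys the
// local VkSemaphore handle; the exported fd is what gets handed to the other backend.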
901bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
902 const GrBackendSemaphore& beSemaphore) {
903 VkSemaphore semaphore = beSemaphore.vkSemaphore();
904 if (VK_NULL_HANDLE == semaphore) {
905 ERRORF(reporter, "Invalid vulkan handle in export call");
906 return false;
907 }
908
909 VkSemaphoreGetFdInfoKHR getFdInfo;
910 getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
911 getFdInfo.pNext = nullptr;
912 getFdInfo.semaphore = semaphore;
913 getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
914
915 VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
916 if (VK_SUCCESS != err) {
917 ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
918 return false;
919 }
920 fVkDestroySemaphore(fDevice, semaphore, nullptr);
921 return true;
922}
923
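// Creates a VkSemaphore, imports the shared fd into it with temporary-import semantics
// (VK_SEMAPHORE_IMPORT_TEMPORARY_BIT), and adds it as a wait semaphore on the destination
// surface via SkSurface::wait().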
924bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
925 sk_sp<SkSurface> surface) {
926 VkSemaphoreCreateInfo semaphoreInfo;
927 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
928 semaphoreInfo.pNext = nullptr;
929 semaphoreInfo.flags = 0;
930
931 VkSemaphore semaphore;
932 VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
933 if (VK_SUCCESS != err) {
934 ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
935 return false;
936 }
937
938 VkImportSemaphoreFdInfoKHR importInfo;
939 importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
940 importInfo.pNext = nullptr;
941 importInfo.semaphore = semaphore;
942 importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
943 importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
944 importInfo.fd = fdHandle;
945
946 err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
947 if (VK_SUCCESS != err) {
948 ERRORF(reporter, "Failed to import semaphore, err: %d", err);
949 return false;
950 }
951
952 GrBackendSemaphore beSemaphore;
953 beSemaphore.initVulkan(semaphore);
954 if (!surface->wait(1, &beSemaphore)) {
955 ERRORF(reporter, "Failed to add wait semaphore to surface");
956 fVkDestroySemaphore(fDevice, semaphore, nullptr);
957 return false;
958 }
959 return true;
960}
961
962sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
963 AHardwareBuffer* buffer) {
964 GrVkImageInfo imageInfo;
965 if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
966 return nullptr;
967 }
968
969 GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);
970
971 sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext.get(),
972 backendTex,
973 kTopLeft_GrSurfaceOrigin,
974 0,
975 kRGBA_8888_SkColorType,
976 nullptr, nullptr);
977
978 if (!surface.get()) {
979 ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
980 return nullptr;
981 }
982
983 return surface;
984}
985
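// Produces a deterministic test pattern keyed off the pixel coordinates. Note that the alpha
// computed by the switch below is currently overridden to 0xff, so the pattern is fully opaque.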
986static SkPMColor get_src_color(int x, int y) {
987 SkASSERT(x >= 0 && x < DEV_W);
988 SkASSERT(y >= 0 && y < DEV_H);
989
990 U8CPU r = x;
991 U8CPU g = y;
992 U8CPU b = 0xc;
993
994 U8CPU a = 0xff;
995 switch ((x+y) % 5) {
996 case 0:
997 a = 0xff;
998 break;
999 case 1:
1000 a = 0x80;
1001 break;
1002 case 2:
1003 a = 0xCC;
1004 break;
1005 case 4:
1006 a = 0x01;
1007 break;
1008 case 3:
1009 a = 0x00;
1010 break;
1011 }
1012 a = 0xff;
1013 return SkPremultiplyARGBInline(a, r, g, b);
1014}
1015
1016static SkBitmap make_src_bitmap() {
1017 static SkBitmap bmp;
1018 if (bmp.isNull()) {
1019 bmp.allocN32Pixels(DEV_W, DEV_H);
1020 intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
1021 for (int y = 0; y < DEV_H; ++y) {
1022 for (int x = 0; x < DEV_W; ++x) {
1023 SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
1024 pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
1025 *pixel = get_src_color(x, y);
1026 }
1027 }
1028 }
1029 return bmp;
1030}
1031
1032static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
1033 const SkBitmap& dstBitmap) {
1034 bool result = true;
1035 for (int y = 0; y < DEV_H && result; ++y) {
1036 for (int x = 0; x < DEV_W && result; ++x) {
1037 const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
1038 const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
1039 if (srcPixel != dstPixel) {
1040 ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1041 x, y, srcPixel, dstPixel);
1042 result = false;
1043 } /*else {
1044 ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1045 x, y, srcPixel, dstPixel);
1046
1047 }*/
1048 }
1049 }
1050 return result;
1051}
1052
1053static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
1054 AHardwareBuffer* buffer) {
1055 if (srcHelper) {
1056 srcHelper->cleanup();
1057 }
1058 if (dstHelper) {
1059 dstHelper->cleanup();
1060 }
1061 if (buffer) {
1062 AHardwareBuffer_release(buffer);
1063 }
1064}
1065
1066enum class SrcType {
1067 kCPU,
1068 kEGL,
1069 kVulkan,
1070};
1071
1072enum class DstType {
1073 kEGL,
1074 kVulkan,
1075};
1076
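// Test flow: create the source and destination helpers, allocate an AHardwareBuffer, fill it
// with the test pattern (either by locking it on the CPU or by rendering through the source
// backend), optionally hand a native fence fd from source to destination instead of doing a
// client-side sync, then import the buffer into the destination backend, draw it into a new
// surface, read back, and compare against the source bitmap.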
1077void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
1078 SrcType srcType, DstType dstType, bool shareSyncs) {
1079 if (SrcType::kCPU == srcType && shareSyncs) {
1080 // We don't currently test this since we don't do any syncs in this case.
1081 return;
1082 }
1083 std::unique_ptr<BaseTestHelper> srcHelper;
1084 std::unique_ptr<BaseTestHelper> dstHelper;
1085 AHardwareBuffer* buffer = nullptr;
1086 if (SrcType::kVulkan == srcType) {
1087 srcHelper.reset(new VulkanTestHelper());
1088 } else if (SrcType::kEGL == srcType) {
1089 srcHelper.reset(new EGLTestHelper(options));
1090 }
1091 if (srcHelper) {
1092 if (!srcHelper->init(reporter)) {
1093 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1094 return;
1095 }
1096 }
1097
1098 if (DstType::kVulkan == dstType) {
1099 dstHelper.reset(new VulkanTestHelper());
1100 } else {
1101 SkASSERT(DstType::kEGL == dstType);
1102 dstHelper.reset(new EGLTestHelper(options));
1103 }
1104 if (dstHelper) {
1105 if (!dstHelper->init(reporter)) {
1106 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1107 return;
1108 }
1109 }
1110
1111 ///////////////////////////////////////////////////////////////////////////
1112 // Setup SkBitmaps
1113 ///////////////////////////////////////////////////////////////////////////
1114
1115 SkBitmap srcBitmap = make_src_bitmap();
1116 SkBitmap dstBitmapSurface;
1117 dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
1118 SkBitmap dstBitmapFinal;
1119 dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);
1120
1121 ///////////////////////////////////////////////////////////////////////////
1122 // Setup AHardwareBuffer
1123 ///////////////////////////////////////////////////////////////////////////
1124
1125 AHardwareBuffer_Desc hwbDesc;
1126 hwbDesc.width = DEV_W;
1127 hwbDesc.height = DEV_H;
1128 hwbDesc.layers = 1;
1129 if (SrcType::kCPU == srcType) {
1130 hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
1131 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
1132 AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
1133 } else {
1134 hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
1135 AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
1136 AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
1137 AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
1138 }
1139 hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
1140 // The following three fields are not used by AHardwareBuffer_allocate
1141 hwbDesc.stride = 0;
1142 hwbDesc.rfu0 = 0;
1143 hwbDesc.rfu1 = 0;
1144
1145 if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
1146 ERRORF(reporter, "Failed to allocated hardware buffer, error: %d", error);
1147 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1148 return;
1149 }
1150
1151 if (SrcType::kCPU == srcType) {
1152 // Get the actual desc for the allocated buffer so we know the stride for uploading CPU data.
1153 AHardwareBuffer_describe(buffer, &hwbDesc);
1154
1155 uint32_t* bufferAddr;
1156 if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
1157 reinterpret_cast<void**>(&bufferAddr))) {
1158 ERRORF(reporter, "Failed to lock hardware buffer");
1159 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1160 return;
1161 }
1162
1163 int bpp = srcBitmap.bytesPerPixel();
1164 uint32_t* src = (uint32_t*)srcBitmap.getPixels();
1165 uint32_t* dst = bufferAddr;
1166 for (int y = 0; y < DEV_H; ++y) {
1167 memcpy(dst, src, DEV_W * bpp);
1168 src += DEV_W;
1169 dst += hwbDesc.stride;
1170 }
1171
1172 for (int y = 0; y < DEV_H; ++y) {
1173 for (int x = 0; x < DEV_W; ++x) {
1174 const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
1175 uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
1176 if (srcPixel != dstPixel) {
1177 ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
1178 x, y, srcPixel, dstPixel);
1179 }
1180 }
1181 }
1182
1183 AHardwareBuffer_unlock(buffer, nullptr);
1184
1185 } else {
1186 srcHelper->makeCurrent();
1187 sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);
1188
1189 if (!surface) {
1190 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1191 return;
1192 }
1193
1194 sk_sp<SkImage> srcBmpImage = SkImage::MakeFromBitmap(srcBitmap);
1195 surface->getCanvas()->drawImage(srcBmpImage, 0, 0);
1196
1197 // If we are testing sharing of syncs, don't do a read here since it forces synchronization
1198 // to occur.
1199 if (!shareSyncs) {
1200 bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
1201 if (!readResult) {
1202 ERRORF(reporter, "Read Pixels on surface failed");
1203 surface.reset();
1204 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1205 return;
1206 }
1207 REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
1208 }
1209
1210 ///////////////////////////////////////////////////////////////////////////
1211 // Cleanup GL/EGL and add syncs
1212 ///////////////////////////////////////////////////////////////////////////
1213
1214 if (shareSyncs) {
1215 if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
1216 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1217 return;
1218 }
1219 } else {
1220 surface.reset();
1221 srcHelper->doClientSync();
1222 srcHelper->releaseImage();
1223 }
1224 }
1225
1226 ///////////////////////////////////////////////////////////////////////////
1227 // Import the HWB into backend and draw it to a surface
1228 ///////////////////////////////////////////////////////////////////////////
1229
1230 dstHelper->makeCurrent();
1231 sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);
1232
1233 if (!wrappedImage) {
1234 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1235 return;
1236 }
1237
1238 GrContext* grContext = dstHelper->grContext();
1239
1240 // Make SkSurface to render wrapped HWB into.
1241 SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
1242 kPremul_SkAlphaType, nullptr);
1243
1244 sk_sp<SkSurface> dstSurf = SkSurface::MakeRenderTarget(grContext,
1245 SkBudgeted::kNo, imageInfo, 0,
1246 kTopLeft_GrSurfaceOrigin,
1247 nullptr, false);
1248 if (!dstSurf.get()) {
1249 ERRORF(reporter, "Failed to create destination SkSurface");
1250 wrappedImage.reset();
1251 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1252 return;
1253 }
1254
1255 if (shareSyncs) {
1256 if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
1257 wrappedImage.reset();
1258 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1259 return;
1260 }
1261 }
1262 dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);
1263
1264 bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
1265 if (!readResult) {
1266 ERRORF(reporter, "Read Pixels failed");
1267 wrappedImage.reset();
1268 dstSurf.reset();
1269 dstHelper->doClientSync();
1270 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1271 return;
1272 }
1273
1274 REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));
1275
1276 dstSurf.reset();
1277 wrappedImage.reset();
1278 dstHelper->doClientSync();
1279 cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
1280}
1281
1282DEF_GPUTEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options) {
1283 run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
1284}
1285
1286DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options) {
1287 run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
1288}
1289
1290DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan, reporter, options) {
1291 run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
1292}
1293
1294DEF_GPUTEST(VulkanHardwareBuffer_CPU_EGL, reporter, options) {
1295 run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
1296}
1297
1298DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL, reporter, options) {
1299 run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
1300}
1301
1302DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options) {
1303 run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
1304}
1305
1306DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL_Syncs, reporter, options) {
1307 run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
1308}
1309
1310DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs, reporter, options) {
1311 run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
1312}
1313
1314DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs, reporter, options) {
1315 run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
1316}
1317
1318DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs, reporter, options) {
1319 run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
1320}
1321
1322#endif
1323