/*
 * Copyright 2019 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkTypes.h"

#if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26
#define GL_GLEXT_PROTOTYPES
#define EGL_EGLEXT_PROTOTYPES

#include "GrAHardwareBufferUtils.h"

#include <algorithm>
#include <array>
#include <cstdio>
#include <cstring>
#include <string>

#include <android/hardware_buffer.h>

#include "GrContext.h"
#include "GrContextPriv.h"
#include "gl/GrGLDefines.h"
#include "gl/GrGLTypes.h"

#ifdef SK_VULKAN
#include "vk/GrVkCaps.h"
#include "vk/GrVkGpu.h"
#endif

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

#include <cutils/properties.h>

// Direct access to private framework and HAL data to work around Adreno 330 driver bugs.
// DO NOT actually call anything from these headers; they are included only to access private
// structs.
#include "../../../../frameworks/native/libs/nativebase/include/nativebase/nativebase.h"
#include "../../../../hardware/qcom/display/libgralloc/gralloc_priv.h"

#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
#define EGL_PROTECTED_CONTENT_EXT 0x32C0

#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;

namespace GrAHardwareBufferUtils {

SkColorType GetSkColorTypeFromBufferFormat(uint32_t bufferFormat) {
    switch (bufferFormat) {
        case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
            return kRGB_888x_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
            return kRGBA_F16_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
            return kRGB_565_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
            return kRGB_888x_SkColorType;
        case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
            return kRGBA_1010102_SkColorType;
        default:
            // Given that we only use this texture as a source, colorType will not impact how
            // Skia uses the texture. The only anticipated effect is that for some format types,
            // if we are not bound as an OES texture, we may get invalid results for SKP capture
            // if we read back the texture.
            return kRGBA_8888_SkColorType;
    }
}
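
// Illustrative usage sketch (not called from this file; `buffer` is a hypothetical caller-owned
// AHardwareBuffer): callers typically derive the color type from the buffer's own description:
//   AHardwareBuffer_Desc desc;
//   AHardwareBuffer_describe(buffer, &desc);
//   SkColorType ct = GetSkColorTypeFromBufferFormat(desc.format);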

GrBackendFormat GetBackendFormat(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                 uint32_t bufferFormat, bool requireKnownFormat) {
    GrBackendApi backend = context->backend();

    if (backend == GrBackendApi::kOpenGL) {
        switch (bufferFormat) {
            // TODO: find out if we can detect which graphic buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
                }
        }
    } else if (backend == GrBackendApi::kVulkan) {
#ifdef SK_VULKAN
        switch (bufferFormat) {
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default: {
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->priv().getGpu());
                    SkASSERT(gpu);
                    VkDevice device = gpu->device();

                    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
                        return GrBackendFormat();
                    }
                    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
                    hwbFormatProps.sType =
                            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
                    hwbFormatProps.pNext = nullptr;

                    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
                    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
                    hwbProps.pNext = &hwbFormatProps;

                    VkResult err = VK_CALL(GetAndroidHardwareBufferProperties(device,
                                                                              hardwareBuffer,
                                                                              &hwbProps));
                    if (VK_SUCCESS != err) {
                        return GrBackendFormat();
                    }

                    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
                        return GrBackendFormat();
                    }

                    GrVkYcbcrConversionInfo ycbcrConversion;
                    ycbcrConversion.fYcbcrModel = hwbFormatProps.suggestedYcbcrModel;
                    ycbcrConversion.fYcbcrRange = hwbFormatProps.suggestedYcbcrRange;
                    ycbcrConversion.fXChromaOffset = hwbFormatProps.suggestedXChromaOffset;
                    ycbcrConversion.fYChromaOffset = hwbFormatProps.suggestedYChromaOffset;
                    ycbcrConversion.fForceExplicitReconstruction = VK_FALSE;
                    ycbcrConversion.fExternalFormat = hwbFormatProps.externalFormat;
                    ycbcrConversion.fExternalFormatFeatures = hwbFormatProps.formatFeatures;
                    if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
                        hwbFormatProps.formatFeatures) {
                        ycbcrConversion.fChromaFilter = VK_FILTER_LINEAR;
                    } else {
                        ycbcrConversion.fChromaFilter = VK_FILTER_NEAREST;
                    }

                    return GrBackendFormat::MakeVk(ycbcrConversion);
                }
            }
        }
#else
        return GrBackendFormat();
#endif
    }
    return GrBackendFormat();
}
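
// Illustrative usage sketch, pairing GetBackendFormat with the buffer description. Passing
// requireKnownFormat=false lets unknown formats fall back to RGBA8 (GL) or to a driver-queried
// external format (Vulkan), as implemented above:
//   AHardwareBuffer_Desc desc;
//   AHardwareBuffer_describe(buffer, &desc);
//   GrBackendFormat fmt = GetBackendFormat(context, buffer, desc.format,
//                                          /*requireKnownFormat=*/false);
//   if (!fmt.isValid()) { /* bail out */ }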

class NativeGLCleanupHelper {
public:
    NativeGLCleanupHelper(GrGLuint texID)
        : fTexID(texID) { }
    ~NativeGLCleanupHelper() {
        // Compared to GLCleanupHelper, we don't have an EGLImageKHR, but a
        // regular GL texture with its own storage. Thus, there's also no
        // reference to the AHardwareBuffer anymore.
        glDeleteTextures(1, &fTexID);
    }
private:
    GrGLuint fTexID;
};

void delete_native_gl_texture(void* context) {
    auto cleanupHelper = static_cast<NativeGLCleanupHelper*>(context);
    delete cleanupHelper;
}

static const auto EGLImageWorkaroundAllowlist = std::array{
    "android.uirendering.cts",
    "com.android.launcher3",
};

static bool isProcessOnEGLWorkaroundAllowlist() {
    static bool checked = false;
    static bool isOnAllowlist = false;
    if (checked) {
        return isOnAllowlist;
    }

    const char* processName = getprogname();
    if (!processName) {
        // Cannot read the process name, so assume false.
        isOnAllowlist = false;
        checked = true;
        return isOnAllowlist;
    }

    const auto& l = EGLImageWorkaroundAllowlist;
    isOnAllowlist = std::find(l.begin(), l.end(), std::string(processName)) != l.end();
    checked = true;

    if (isOnAllowlist) {
        SkDebugf("createGLTextureFromPrivateHandle: process is listed for EGLImage "
                 "workaround");
    }

    return isOnAllowlist;
}

static GrBackendTexture createGLTextureFromPrivateHandle(
        GrContext* context, const AHardwareBuffer* hardwareBuffer,
        const int imageWidth, const int imageHeight,
        DeleteImageProc* deleteProc, DeleteImageCtx* deleteCtx,
        const GrBackendFormat& backendFormat) {

    if (!isProcessOnEGLWorkaroundAllowlist()) {
        return GrBackendTexture();
    }

    if (!hardwareBuffer) {
        SkDebugf("createGLTextureFromPrivateHandle: Cannot work without AHardwareBuffer");
        return GrBackendTexture();
    }

    const GrGLenum* glFormat = backendFormat.getGLFormat();
    if (!glFormat) {
        SkDebugf("createGLTextureFromPrivateHandle: backend API must be GL.");
        return GrBackendTexture();
    }

    size_t bytesPerPixel = 0;
    GrGLenum type = GL_INVALID_ENUM;
    // In the fall-back OpenGL ES 2.0 context, sized formats are not supported,
    // so translate to the unsized format enums.
    GrGLenum format = GL_INVALID_ENUM;
    switch (*glFormat) {
        case GR_GL_RGBA8:
            format = GL_RGBA;
            bytesPerPixel = 4;
            type = GL_UNSIGNED_BYTE;
            break;
        case GR_GL_RGB565:
            format = GL_RGB;
            bytesPerPixel = 2;
            type = GL_UNSIGNED_SHORT_5_6_5;
            break;
        default:
            SkDebugf("createGLTextureFromPrivateHandle: Unsupported GL format %u", *glFormat);
            return GrBackendTexture();
    }

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    const native_handle_t* nativeHandle =
            reinterpret_cast<const ANativeWindowBuffer*>(clientBuffer)->handle;
    const bool handleIsAsExpected = private_handle_t::validate(nativeHandle) == 0;
    if (!handleIsAsExpected) {
        SkDebugf("createGLTextureFromPrivateHandle: GraphicBuffer doesn't seem to map to gralloc "
                 "private handle.");
        return GrBackendTexture();
    }
    const private_handle_t* hnd = static_cast<const private_handle_t*>(nativeHandle);

    if (hnd->flags & private_handle_t::PRIV_FLAGS_NON_CPU_WRITER) {
        // The buffer comes from a non-CPU hardware component. Specifically for buffers coming
        // from hardware encoders, access doesn't seem to work reliably the way it's done here.
        SkDebugf("createGLTextureFromPrivateHandle: Cannot handle non-CPU buffers.");
        return GrBackendTexture();
    }

    if (hnd->flags & private_handle_t::PRIV_FLAGS_HW_COMPOSER) {
        SkDebugf("createGLTextureFromPrivateHandle: Cannot handle hardware composer buffers.");
        return GrBackendTexture();
    }

    const int bufferWidth = hnd->width;  // May be aligned and be larger than the actual image.
    const int bufferHeight = hnd->height;
    if (imageWidth > bufferWidth || imageHeight > bufferHeight) {
        SkDebugf("createGLTextureFromPrivateHandle: image is larger than the buffer. This is not "
                 "supposed to happen.");
        return GrBackendTexture();
    }
    const size_t bufferRowBytes = bufferWidth * bytesPerPixel;
    // We access as many rows as the image has, aligned to the width of the buffer.
    const size_t minBufferSize = bufferRowBytes * imageHeight;
    if (hnd->size < 0 || static_cast<size_t>(hnd->size) < minBufferSize) {
        SkDebugf("createGLTextureFromPrivateHandle: buffer is smaller than expected or invalid.");
        return GrBackendTexture();
    }

    const char* bufferData = reinterpret_cast<const char*>(hnd->base);

    GrGLuint texID;
    glGenTextures(1, &texID);
    glBindTexture(GL_TEXTURE_2D, texID);

    if (imageWidth == bufferWidth) {
        // Take the quick path if possible.
        glTexImage2D(GL_TEXTURE_2D, 0, format, imageWidth, imageHeight, 0, format, type,
                     bufferData);
    } else {
        // The buffer has some extra space for alignment. Copy row by row.
        // First allocate the texture storage without filling it.
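        // (A single upload with GL_UNPACK_ROW_LENGTH would avoid the per-row copies below, but
        // that pixel-store parameter is only available from OpenGL ES 3.0 on, and this path must
        // also work in the ES 2.0 fall-back context mentioned above.)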
        glTexImage2D(GL_TEXTURE_2D, 0, format, imageWidth, imageHeight, 0, format, type, nullptr);
        for (int y = 0; y < imageHeight; ++y) {
            const void* bufferRowAddr = bufferData + static_cast<size_t>(y) * bufferRowBytes;
            glTexSubImage2D(GL_TEXTURE_2D, 0, 0, y, imageWidth, 1, format, type, bufferRowAddr);
        }
    }
    int gl_error;
    bool hasGlError = false;
    while (GL_NO_ERROR != (gl_error = glGetError())) {
        SkDebugf("createGLTextureFromPrivateHandle: glGetError reports %i", gl_error);
        hasGlError = true;
    }
    if (hasGlError) {
        SkDebugf("createGLTextureFromPrivateHandle: discarding results, because we had GL errors.");
        glDeleteTextures(1, &texID);
        return GrBackendTexture();
    }

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = GL_TEXTURE_2D;
    // Store the original, potentially sized format here, not the more compatible unsized one.
    textureInfo.fFormat = *glFormat;

    GrBackendTexture backendTex(imageWidth, imageHeight, GrMipMapped::kNo, textureInfo);

    *deleteProc = delete_native_gl_texture;
    *deleteCtx = new NativeGLCleanupHelper(texID);

    return backendTex;
}

class GLCleanupHelper {
public:
    GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
        : fTexID(texID)
        , fImage(image)
        , fDisplay(display) { }
    ~GLCleanupHelper() {
        glDeleteTextures(1, &fTexID);
        // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
        eglDestroyImageKHR(fDisplay, fImage);
    }
private:
    GrGLuint fTexID;
    EGLImageKHR fImage;
    EGLDisplay fDisplay;
};

void delete_gl_texture(void* context) {
    GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
    delete cleanupHelper;
}

static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    while (GL_NO_ERROR != glGetError()) {}  // clear GL errors

    // On Adreno 330 drivers, sampling from an EGLImage bound to a GL_TEXTURE_EXTERNAL_OES
    // texture target leads to artifacts. Copy the image data into a regular GL texture instead.
    static const bool gl_tex_workaround_enabled =
        [] () -> bool {
            const int value = property_get_bool("skia.force_gl_texture", 0);
            SkDebugf("make_gl_backend_texture skia.force_gl_texture=%i", value);
            return bool(value);
        }();

    if (gl_tex_workaround_enabled) {
        GrBackendTexture tex = createGLTextureFromPrivateHandle(
                context, hardwareBuffer, width, height, deleteProc, deleteCtx, backendFormat);

        if (tex.isValid()) {
            return tex;
        }
        SkDebugf("make_gl_backend_texture: Private data access workaround failed. Falling back "
                 "to native buffer access.");
    }
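
    // Note: the workaround property is read once via the static initializer above, so toggling
    // it (e.g. `adb shell setprop skia.force_gl_texture 1`) only affects newly started processes.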

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError());
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    GrGLuint target = isRenderable ? GR_GL_TEXTURE_2D : GR_GL_TEXTURE_EXTERNAL;

    glBindTexture(target, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glEGLImageTargetTexture2DOES(target, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = target;
    textureInfo.fFormat = *backendFormat.getGLFormat();

    *deleteProc = delete_gl_texture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}

#ifdef SK_VULKAN
class VulkanCleanupHelper {
public:
    VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
        : fDevice(gpu->device())
        , fImage(image)
        , fMemory(memory)
        , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
        , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
    ~VulkanCleanupHelper() {
        fDestroyImage(fDevice, fImage, nullptr);
        fFreeMemory(fDevice, fMemory, nullptr);
    }
private:
    VkDevice fDevice;
    VkImage fImage;
    VkDeviceMemory fMemory;
    PFN_vkDestroyImage fDestroyImage;
    PFN_vkFreeMemory fFreeMemory;
};

void delete_vk_image(void* context) {
    VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
    delete cleanupHelper;
}

static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    SkASSERT(context->backend() == GrBackendApi::kVulkan);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->priv().getGpu());
    SkASSERT(gpu);

    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0;  // If this is zero it is as if we aren't using this struct.

    const GrVkYcbcrConversionInfo* ycbcrConversion = backendFormat.getVkYcbcrConversionInfo();
    if (!ycbcrConversion) {
        return GrBackendTexture();
    }

    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
        // TODO: We should not assume the transfer features here and instead should have a way for
        // Ganesh's tracking of internal images to report whether or not they support transfers.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(!ycbcrConversion->isValid());
    } else {
        SkASSERT(ycbcrConversion->isValid());
        // We have an external-only format.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(format == VK_FORMAT_UNDEFINED);
        SkASSERT(hwbFormatProps.externalFormat == ycbcrConversion->fExternalFormat);
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    SkASSERT(format == hwbFormatProps.format);

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        &externalFormat,                                                     // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    if (format != VK_FORMAT_UNDEFINED) {
        usageFlags = usageFlags |
                     VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                     VK_IMAGE_USAGE_TRANSFER_DST_BIT;
        if (isRenderable) {
            usageFlags = usageFlags | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
        }
    }

    // TODO: Check the supported tilings via vkGetPhysicalDeviceImageFormatProperties2 to see if
    // we have to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,       // sType
        &externalMemoryImageInfo,                  // pNext
        0,                                         // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                          // VkImageType
        format,                                    // VkFormat
        { (uint32_t)width, (uint32_t)height, 1 },  // VkExtent3D
        1,                                         // mipLevels
        1,                                         // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                     // samples
        tiling,                                    // VkImageTiling
        usageFlags,                                // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                 // VkSharingMode
        0,                                         // queueFamilyCount
        0,                                         // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,                 // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
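    // Pick the first device-local memory type that the hardware buffer's import allows;
    // hwbProps.memoryTypeBits is a bitmask over the physical device's memory types.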
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

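    // The allocation below chains VkMemoryDedicatedAllocateInfo with the
    // VkImportAndroidHardwareBufferInfoANDROID import info: the AHardwareBuffer is imported
    // into a dedicated allocation bound to exactly this VkImage, as the
    // VK_ANDROID_external_memory_android_hardware_buffer extension expects for images.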
    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
        &dedicatedAllocInfo,                     // pNext
        hwbProps.allocationSize,                 // allocationSize
        typeIndex,                               // memoryTypeIndex
    };

    VkDeviceMemory memory;

    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;

    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT, but current Adreno devices do
    // not support that extension. Or, if we know the source of the AHardwareBuffer is not from a
    // "foreign" device, we can leave them as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    imageInfo.fYcbcrConversionInfo = *ycbcrConversion;

    *deleteProc = delete_vk_image;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
#endif

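// The EGL extension list is a space-separated string, so check for PROT_CONTENT_EXT_STR as the
// whole string, at the start, at the end, or in the middle of the list. A plain strstr() alone
// could false-positive on another extension whose name contains this one as a substring.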
static bool can_import_protected_content_eglimpl() {
    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
    size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR);
    size_t extsLen = strlen(exts);
    bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts);
    bool atStart = !strncmp(PROT_CONTENT_EXT_STR " ", exts, cropExtLen+1);
    bool atEnd = (cropExtLen+1) < extsLen
                 && !strcmp(" " PROT_CONTENT_EXT_STR,
                            exts + extsLen - (cropExtLen+1));
    bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " ");
    return equal || atStart || atEnd || inMiddle;
}

static bool can_import_protected_content(GrContext* context) {
    if (GrBackendApi::kOpenGL == context->backend()) {
        // Only compute whether the extension is present once, the first time this
        // function is called.
        static bool hasIt = can_import_protected_content_eglimpl();
        return hasIt;
    }
    return false;
}

GrBackendTexture MakeBackendTexture(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                    int width, int height,
                                    DeleteImageProc* deleteProc,
                                    DeleteImageCtx* deleteCtx,
                                    bool isProtectedContent,
                                    const GrBackendFormat& backendFormat,
                                    bool isRenderable) {
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    bool createProtectedImage = isProtectedContent && can_import_protected_content(context);

    if (GrBackendApi::kOpenGL == context->backend()) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat,
                                       isRenderable);
    } else {
        SkASSERT(GrBackendApi::kVulkan == context->backend());
#ifdef SK_VULKAN
        // Currently we don't support protected images on Vulkan.
        SkASSERT(!createProtectedImage);
        return make_vk_backend_texture(context, hardwareBuffer, width, height, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat,
                                       isRenderable);
#else
        return GrBackendTexture();
#endif
    }
}
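
// Illustrative end-to-end sketch (assumes a live GrContext and an AHardwareBuffer owned by the
// caller); the returned deleteProc/deleteCtx pair must be invoked once the texture is no longer
// in use:
//   AHardwareBuffer_Desc desc;
//   AHardwareBuffer_describe(buffer, &desc);
//   GrBackendFormat fmt = GetBackendFormat(context, buffer, desc.format,
//                                          /*requireKnownFormat=*/false);
//   DeleteImageProc deleteProc = nullptr;
//   DeleteImageCtx deleteCtx = nullptr;
//   GrBackendTexture tex = MakeBackendTexture(context, buffer, desc.width, desc.height,
//                                             &deleteProc, &deleteCtx,
//                                             /*isProtectedContent=*/false, fmt,
//                                             /*isRenderable=*/false);
//   ...
//   if (deleteProc) { deleteProc(deleteCtx); }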

} // GrAHardwareBufferUtils

#endif