blob: 89c2ca234921a81efd77b5f6b16795a0fa45be3b [file] [log] [blame]
Greg Daniel173464d2019-02-06 15:30:34 -05001/*
2 * Copyright 2019 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "include/core/SkTypes.h"
Greg Daniel173464d2019-02-06 15:30:34 -05009
10#if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26
11#define GL_GLEXT_PROTOTYPES
12#define EGL_EGLEXT_PROTOTYPES
13
Mike Kleinc0bd9f92019-04-23 12:05:21 -050014#include "src/gpu/GrAHardwareBufferUtils.h"
Greg Daniel173464d2019-02-06 15:30:34 -050015
16#include <android/hardware_buffer.h>
Brian Salomond4764a12019-08-08 12:08:24 -040017#include <EGL/egl.h>
18#include <EGL/eglext.h>
19#include <GLES/gl.h>
20#include <GLES/glext.h>
Greg Daniel173464d2019-02-06 15:30:34 -050021
Adlai Holler29405382020-07-20 16:02:05 -040022#include "include/gpu/GrDirectContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050023#include "include/gpu/gl/GrGLTypes.h"
Adlai Hollera0693042020-10-14 11:23:11 -040024#include "src/gpu/GrDirectContextPriv.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050025#include "src/gpu/gl/GrGLDefines.h"
Brian Salomond4764a12019-08-08 12:08:24 -040026#include "src/gpu/gl/GrGLUtil.h"
Greg Daniel173464d2019-02-06 15:30:34 -050027
28#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050029#include "src/gpu/vk/GrVkCaps.h"
30#include "src/gpu/vk/GrVkGpu.h"
Greg Daniel173464d2019-02-06 15:30:34 -050031#endif
32
Greg Daniel173464d2019-02-06 15:30:34 -050033#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
34#define EGL_PROTECTED_CONTENT_EXT 0x32C0
35
Greg Daniel02497d42020-02-21 15:46:27 -050036#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X
Greg Daniel173464d2019-02-06 15:30:34 -050037
38namespace GrAHardwareBufferUtils {
39
40SkColorType GetSkColorTypeFromBufferFormat(uint32_t bufferFormat) {
41 switch (bufferFormat) {
42 case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
Greg Daniel173464d2019-02-06 15:30:34 -050043 return kRGBA_8888_SkColorType;
Greg Daniel4b06a922019-02-14 09:43:36 -050044 case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
45 return kRGB_888x_SkColorType;
Greg Daniel173464d2019-02-06 15:30:34 -050046 case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
47 return kRGBA_F16_SkColorType;
48 case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
49 return kRGB_565_SkColorType;
50 case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
51 return kRGB_888x_SkColorType;
52 case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
53 return kRGBA_1010102_SkColorType;
54 default:
55 // Given that we only use this texture as a source, colorType will not impact how Skia
56 // uses the texture. The only potential affect this is anticipated to have is that for
57 // some format types if we are not bound as an OES texture we may get invalid results
58 // for SKP capture if we read back the texture.
59 return kRGBA_8888_SkColorType;
60 }
61}
62
// Chooses the GrBackendFormat with which |hardwareBuffer| should be wrapped for the backend
// API of |dContext|. |bufferFormat| is the AHARDWAREBUFFER_FORMAT_* reported for the buffer.
// For unrecognized formats: if |requireKnownFormat| is true an invalid GrBackendFormat is
// returned; otherwise GL falls back to RGBA8 and Vulkan queries the driver for the buffer's
// (possibly implementation-defined external) format.
GrBackendFormat GetBackendFormat(GrDirectContext* dContext, AHardwareBuffer* hardwareBuffer,
                                 uint32_t bufferFormat, bool requireKnownFormat) {
    GrBackendApi backend = dContext->backend();

    if (backend == GrBackendApi::kOpenGL) {
        switch (bufferFormat) {
            //TODO: find out if we can detect, which graphic buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    // Unknown format: treat it like RGBA8 sampled through the OES target.
                    return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
                }
        }
    } else if (backend == GrBackendApi::kVulkan) {
#ifdef SK_VULKAN
        switch (bufferFormat) {
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default: {
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    // Ask the Vulkan driver what this buffer maps to; typically this yields
                    // a driver-specific external format requiring a YCbCr conversion.
                    GrVkGpu* gpu = static_cast<GrVkGpu*>(dContext->priv().getGpu());
                    SkASSERT(gpu);
                    VkDevice device = gpu->device();

                    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
                        return GrBackendFormat();
                    }
                    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
                    hwbFormatProps.sType =
                            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
                    hwbFormatProps.pNext = nullptr;

                    // Chain the format-properties query off the general properties query.
                    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
                    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
                    hwbProps.pNext = &hwbFormatProps;

                    VkResult err = VK_CALL(GetAndroidHardwareBufferProperties(device,
                                                                              hardwareBuffer,
                                                                              &hwbProps));
                    if (VK_SUCCESS != err) {
                        return GrBackendFormat();
                    }

                    // A defined VkFormat here would mean a known format we failed to match
                    // above; importing those via this path is not supported.
                    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
                        return GrBackendFormat();
                    }

                    // External-only format: describe it fully via a YCbCr conversion using the
                    // driver's suggested settings.
                    GrVkYcbcrConversionInfo ycbcrConversion;
                    ycbcrConversion.fYcbcrModel = hwbFormatProps.suggestedYcbcrModel;
                    ycbcrConversion.fYcbcrRange = hwbFormatProps.suggestedYcbcrRange;
                    ycbcrConversion.fXChromaOffset = hwbFormatProps.suggestedXChromaOffset;
                    ycbcrConversion.fYChromaOffset = hwbFormatProps.suggestedYChromaOffset;
                    ycbcrConversion.fForceExplicitReconstruction = VK_FALSE;
                    ycbcrConversion.fExternalFormat = hwbFormatProps.externalFormat;
                    ycbcrConversion.fFormatFeatures = hwbFormatProps.formatFeatures;
                    // Prefer linear chroma filtering when the format supports it.
                    if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
                        hwbFormatProps.formatFeatures) {
                        ycbcrConversion.fChromaFilter = VK_FILTER_LINEAR;
                    } else {
                        ycbcrConversion.fChromaFilter = VK_FILTER_NEAREST;
                    }

                    return GrBackendFormat::MakeVk(ycbcrConversion);
                }
            }
        }
#else
        return GrBackendFormat();
#endif
    }
    return GrBackendFormat();
}
159
// RAII wrapper owning the GL texture and EGLImage created for an imported AHardwareBuffer.
// An instance is handed back to the caller as the TexImageCtx and destroyed through
// delete_gl_texture().
class GLTextureHelper {
public:
    // Takes ownership of |texID| and |image|; both are released in the destructor.
    GLTextureHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display, GrGLuint texTarget)
        : fTexID(texID)
        , fImage(image)
        , fDisplay(display)
        , fTexTarget(texTarget) { }
    ~GLTextureHelper() {
        glDeleteTextures(1, &fTexID);
        // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
        eglDestroyImageKHR(fDisplay, fImage);
    }
    // Re-attaches the EGLImage to the texture; see update_gl_texture().
    void rebind(GrDirectContext*);

private:
    GrGLuint    fTexID;
    EGLImageKHR fImage;
    EGLDisplay  fDisplay;
    GrGLuint    fTexTarget;
};
180
Adlai Holler29405382020-07-20 16:02:05 -0400181void GLTextureHelper::rebind(GrDirectContext* dContext) {
Stan Ilieva56b04a2019-08-01 14:22:34 -0400182 glBindTexture(fTexTarget, fTexID);
183 GLenum status = GL_NO_ERROR;
184 if ((status = glGetError()) != GL_NO_ERROR) {
185 SkDebugf("glBindTexture(%#x, %d) failed (%#x)", (int) fTexTarget,
186 (int) fTexID, (int) status);
187 return;
188 }
189 glEGLImageTargetTexture2DOES(fTexTarget, fImage);
190 if ((status = glGetError()) != GL_NO_ERROR) {
191 SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
192 return;
193 }
Adlai Holler29405382020-07-20 16:02:05 -0400194 dContext->resetContext(kTextureBinding_GrGLBackendState);
Stan Ilieva56b04a2019-08-01 14:22:34 -0400195}
196
Greg Daniel173464d2019-02-06 15:30:34 -0500197void delete_gl_texture(void* context) {
Stan Ilieva56b04a2019-08-01 14:22:34 -0400198 GLTextureHelper* cleanupHelper = static_cast<GLTextureHelper*>(context);
Greg Daniel173464d2019-02-06 15:30:34 -0500199 delete cleanupHelper;
200}
201
Adlai Holler1fc76ce2020-10-07 11:36:49 -0400202void update_gl_texture(void* context, GrDirectContext* dContext) {
Stan Ilieva56b04a2019-08-01 14:22:34 -0400203 GLTextureHelper* cleanupHelper = static_cast<GLTextureHelper*>(context);
Adlai Holler29405382020-07-20 16:02:05 -0400204 cleanupHelper->rebind(dContext);
Stan Ilieva56b04a2019-08-01 14:22:34 -0400205}
206
// Imports |hardwareBuffer| as a GL texture via an EGLImage. On success fills in the
// delete/update procs and |imageCtx| (a GLTextureHelper owning the texture and image) and
// returns the wrapped texture; returns an invalid GrBackendTexture on any failure, cleaning
// up whatever was created up to that point.
static GrBackendTexture make_gl_backend_texture(
        GrDirectContext* dContext, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        UpdateImageProc* updateProc,
        TexImageCtx* imageCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    while (GL_NO_ERROR != glGetError()) {} //clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    // When content is not protected the two protected-content slots become EGL_NONE,
    // which terminates the attribute list early.
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError() );
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    // Renderable imports must use TEXTURE_2D; sample-only imports use the OES external target.
    GrGLuint target = isRenderable ? GR_GL_TEXTURE_2D : GR_GL_TEXTURE_EXTERNAL;

    glBindTexture(target, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    // Attach the EGLImage as the texture's backing store.
    glEGLImageTargetTexture2DOES(target, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    // The GL texture binding was changed behind Ganesh's back; tell it to re-sync that state.
    dContext->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = target;
    textureInfo.fFormat = GrGLFormatToEnum(backendFormat.asGLFormat());

    *deleteProc = delete_gl_texture;
    *updateProc = update_gl_texture;
    *imageCtx = new GLTextureHelper(texID, image, display, target);

    return GrBackendTexture(width, height, GrMipmapped::kNo, textureInfo);
}
270
271#ifdef SK_VULKAN
// RAII wrapper owning the VkImage and VkDeviceMemory created for an imported
// AHardwareBuffer. The destruction function pointers are cached up front so the helper
// does not need the GrVkGpu to still exist when it is destroyed.
class VulkanCleanupHelper {
public:
    VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
        : fDevice(gpu->device())
        , fImage(image)
        , fMemory(memory)
        , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
        , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
    ~VulkanCleanupHelper() {
        // Destroy the image before freeing the memory it is bound to.
        fDestroyImage(fDevice, fImage, nullptr);
        fFreeMemory(fDevice, fMemory, nullptr);
    }
private:
    VkDevice fDevice;
    VkImage fImage;
    VkDeviceMemory fMemory;
    PFN_vkDestroyImage fDestroyImage;
    PFN_vkFreeMemory fFreeMemory;
};
291
292void delete_vk_image(void* context) {
293 VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
294 delete cleanupHelper;
295}
296
// UpdateImageProc for Vulkan: intentionally a no-op. The VkImage stays bound to the
// AHardwareBuffer's memory, so new buffer contents are visible without any rebinding
// (unlike the GL path, which must re-attach the EGLImage).
void update_vk_image(void* context, GrDirectContext* dContext) {
    // no op
}
300
// Imports |hardwareBuffer| as a VkImage using VK_ANDROID_external_memory_android_hardware_buffer.
// Creates the image, finds a compatible device-local memory type, imports the buffer's memory
// as a dedicated allocation, and binds it. On success fills in the delete/update procs and
// |imageCtx| (a VulkanCleanupHelper owning the image and memory); returns an invalid
// GrBackendTexture on any failure, undoing whatever was created up to that point.
static GrBackendTexture make_vk_backend_texture(
        GrDirectContext* dContext, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        UpdateImageProc* updateProc,
        TexImageCtx* imageCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    SkASSERT(dContext->backend() == GrBackendApi::kVulkan);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(dContext->priv().getGpu());

    // Protected imports require the context itself to be protected.
    SkASSERT(!isProtectedContent || gpu->protectedContext());

    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    SkASSERT(gpu);

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    VkFormat format;
    SkAssertResult(backendFormat.asVkFormat(&format));

    VkResult err;

    // Query the buffer's properties; the format-properties struct is chained off the
    // general properties query.
    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0;  // If this is zero it is as if we aren't using this struct.

    const GrVkYcbcrConversionInfo* ycbcrConversion = backendFormat.getVkYcbcrConversionInfo();
    if (!ycbcrConversion) {
        return GrBackendTexture();
    }

    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
        // Known VkFormat: expect sampling and transfer support, and no YCbCr conversion.
        // TODO: We should not assume the transfer features here and instead should have a way for
        // Ganesh's tracking of internal images to report whether or not they support transfers.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(!ycbcrConversion->isValid());
    } else {
        SkASSERT(ycbcrConversion->isValid());
        // We have an external only format
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(format == VK_FORMAT_UNDEFINED);
        SkASSERT(hwbFormatProps.externalFormat == ycbcrConversion->fExternalFormat);
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    SkASSERT(format == hwbFormatProps.format);

    // Declare that the image's memory will be imported from an AHardwareBuffer.
    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        &externalFormat,                                                     // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
    // External-only (undefined) formats may only be sampled; known formats also get
    // transfer usage, plus color-attachment usage when renderable.
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    if (format != VK_FORMAT_UNDEFINED) {
        usageFlags = usageFlags |
                     VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                     VK_IMAGE_USAGE_TRANSFER_DST_BIT;
        if (isRenderable) {
            usageFlags = usageFlags | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
        }
    }

    // TODO: Check the supported tilings vkGetPhysicalDeviceImageFormatProperties2 to see if we have
    // to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    VkImageCreateFlags flags = isProtectedContent ? VK_IMAGE_CREATE_PROTECTED_BIT : 0;

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        &externalMemoryImageInfo,                    // pNext
        flags,                                       // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                            // VkImageType
        format,                                      // VkFormat
        { (uint32_t)width, (uint32_t)height, 1 },    // VkExtent3D
        1,                                           // mipLevels
        1,                                           // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                       // samples
        tiling,                                      // VkImageTiling
        usageFlags,                                  // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        0,                                           // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,                   // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    // Pick the first device-local memory type the buffer can be imported into
    // (hwbProps.memoryTypeBits is the mask of compatible types).
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    // AHardwareBuffer imports must be dedicated allocations tied to exactly this image.
    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
        &dedicatedAllocInfo,                         // pNext
        hwbProps.allocationSize,                     // allocationSize
        typeIndex,                                   // memoryTypeIndex
    };

    VkDeviceMemory memory;

    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkAlloc alloc;
    alloc.fMemory = memory;
    alloc.fOffset = 0;
    alloc.fSize = hwbProps.allocationSize;
    alloc.fFlags = 0;

    GrVkImageInfo imageInfo;
    imageInfo.fImage = image;
    imageInfo.fAlloc = alloc;
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not
    // support that extension. Or if we know the source of the AHardwareBuffer is not from a
    // "foreign" device we can leave them as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    imageInfo.fProtected = isProtectedContent ? GrProtected::kYes : GrProtected::kNo;
    imageInfo.fYcbcrConversionInfo = *ycbcrConversion;
    imageInfo.fSharingMode = imageCreateInfo.sharingMode;

    *deleteProc = delete_vk_image;
    *updateProc = update_vk_image;
    *imageCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
507#endif
508
509static bool can_import_protected_content_eglimpl() {
510 EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
511 const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
512 size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR);
513 size_t extsLen = strlen(exts);
514 bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts);
515 bool atStart = !strncmp(PROT_CONTENT_EXT_STR " ", exts, cropExtLen+1);
516 bool atEnd = (cropExtLen+1) < extsLen
517 && !strcmp(" " PROT_CONTENT_EXT_STR,
518 exts + extsLen - (cropExtLen+1));
519 bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " ");
520 return equal || atStart || atEnd || inMiddle;
521}
522
// Returns true if |dContext|'s backend can import protected (DRM) AHardwareBuffer content:
// GL requires the EGL_EXT_protected_content extension; Vulkan requires a protected context.
// Falls through to false for other backends (or Vulkan without SK_VULKAN compiled in).
static bool can_import_protected_content(GrDirectContext* dContext) {
    if (GrBackendApi::kOpenGL == dContext->backend()) {
        // Only compute whether the extension is present once the first time this
        // function is called.
        static bool hasIt = can_import_protected_content_eglimpl();
        return hasIt;
    } else if (GrBackendApi::kVulkan == dContext->backend()) {
#ifdef SK_VULKAN
        return static_cast<GrVkGpu*>(dContext->priv().getGpu())->protectedContext();
#endif
    }
    return false;
}
536
// Public entry point: wraps |hardwareBuffer| in a GrBackendTexture for |dContext|'s backend
// (GL or Vulkan). On success the out-params receive a DeleteImageProc, an UpdateImageProc,
// and an opaque TexImageCtx that must be passed to both procs; the caller owns the cleanup.
// Returns an invalid GrBackendTexture on failure or if the context is null/abandoned.
GrBackendTexture MakeBackendTexture(GrDirectContext* dContext, AHardwareBuffer* hardwareBuffer,
                                    int width, int height,
                                    DeleteImageProc* deleteProc,
                                    UpdateImageProc* updateProc,
                                    TexImageCtx* imageCtx,
                                    bool isProtectedContent,
                                    const GrBackendFormat& backendFormat,
                                    bool isRenderable) {
    SkASSERT(dContext);
    if (!dContext || dContext->abandoned()) {
        return GrBackendTexture();
    }
    // Only honor the protected-content request when the backend can actually import it.
    bool createProtectedImage = isProtectedContent && can_import_protected_content(dContext);

    if (GrBackendApi::kOpenGL == dContext->backend()) {
        return make_gl_backend_texture(dContext, hardwareBuffer, width, height, deleteProc,
                                       updateProc, imageCtx, createProtectedImage, backendFormat,
                                       isRenderable);
    } else {
        SkASSERT(GrBackendApi::kVulkan == dContext->backend());
#ifdef SK_VULKAN
        return make_vk_backend_texture(dContext, hardwareBuffer, width, height, deleteProc,
                                       updateProc, imageCtx, createProtectedImage, backendFormat,
                                       isRenderable);
#else
        return GrBackendTexture();
#endif
    }
}
566
Greg Daniel173464d2019-02-06 15:30:34 -0500567} // GrAHardwareBufferUtils
568
569#endif
570