blob: 294f999a77e6df330dc40692166291cee7444257 [file] [log] [blame]
Greg Daniel173464d2019-02-06 15:30:34 -05001/*
2 * Copyright 2019 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "include/core/SkTypes.h"
Greg Daniel173464d2019-02-06 15:30:34 -05009
10#if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26
11#define GL_GLEXT_PROTOTYPES
12#define EGL_EGLEXT_PROTOTYPES
13
Mike Kleinc0bd9f92019-04-23 12:05:21 -050014#include "src/gpu/GrAHardwareBufferUtils.h"
Greg Daniel173464d2019-02-06 15:30:34 -050015
16#include <android/hardware_buffer.h>
17
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "include/gpu/GrContext.h"
19#include "include/gpu/gl/GrGLTypes.h"
20#include "src/gpu/GrContextPriv.h"
21#include "src/gpu/gl/GrGLDefines.h"
Greg Daniel173464d2019-02-06 15:30:34 -050022
23#ifdef SK_VULKAN
Mike Kleinc0bd9f92019-04-23 12:05:21 -050024#include "src/gpu/vk/GrVkCaps.h"
25#include "src/gpu/vk/GrVkGpu.h"
Greg Daniel173464d2019-02-06 15:30:34 -050026#endif
27
28#include <EGL/egl.h>
29#include <EGL/eglext.h>
30#include <GLES/gl.h>
31#include <GLES/glext.h>
32
33#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
34#define EGL_PROTECTED_CONTENT_EXT 0x32C0
35
36#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;
37
38namespace GrAHardwareBufferUtils {
39
40SkColorType GetSkColorTypeFromBufferFormat(uint32_t bufferFormat) {
41 switch (bufferFormat) {
42 case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
Greg Daniel173464d2019-02-06 15:30:34 -050043 return kRGBA_8888_SkColorType;
Greg Daniel4b06a922019-02-14 09:43:36 -050044 case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
45 return kRGB_888x_SkColorType;
Greg Daniel173464d2019-02-06 15:30:34 -050046 case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
47 return kRGBA_F16_SkColorType;
48 case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
49 return kRGB_565_SkColorType;
50 case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
51 return kRGB_888x_SkColorType;
52 case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
53 return kRGBA_1010102_SkColorType;
54 default:
55 // Given that we only use this texture as a source, colorType will not impact how Skia
56 // uses the texture. The only potential affect this is anticipated to have is that for
57 // some format types if we are not bound as an OES texture we may get invalid results
58 // for SKP capture if we read back the texture.
59 return kRGBA_8888_SkColorType;
60 }
61}
62
// Translates an AHardwareBuffer format into a GrBackendFormat for the context's
// backend API.
//
// GL backend: known formats map to external (OES) texture formats; unknown
// formats either fail (requireKnownFormat) or fall back to RGBA8.
//
// Vulkan backend: known formats map directly to VkFormats. For an unknown
// format the driver is queried; only "external only" formats (reported as
// VK_FORMAT_UNDEFINED) are supported, and those are returned wrapped in a
// GrVkYcbcrConversionInfo built from the driver's suggested YCbCr settings.
//
// Returns a default-constructed (invalid) GrBackendFormat on any failure.
GrBackendFormat GetBackendFormat(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                 uint32_t bufferFormat, bool requireKnownFormat) {
    GrBackendApi backend = context->backend();

    if (backend == GrBackendApi::kOpenGL) {
        switch (bufferFormat) {
            // TODO: find out if we can detect which graphic buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    // Unknown format: fall back to RGBA8 sampled through an
                    // external texture target.
                    return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
                }
        }
    } else if (backend == GrBackendApi::kVulkan) {
#ifdef SK_VULKAN
        switch (bufferFormat) {
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default: {
                if (requireKnownFormat) {
                    return GrBackendFormat();
                } else {
                    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->priv().getGpu());
                    SkASSERT(gpu);
                    VkDevice device = gpu->device();

                    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
                        return GrBackendFormat();
                    }
                    // Ask the driver what format/YCbCr properties this buffer has.
                    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
                    hwbFormatProps.sType =
                            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
                    hwbFormatProps.pNext = nullptr;

                    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
                    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
                    hwbProps.pNext = &hwbFormatProps;

                    VkResult err = VK_CALL(GetAndroidHardwareBufferProperties(device,
                                                                             hardwareBuffer,
                                                                             &hwbProps));
                    if (VK_SUCCESS != err) {
                        return GrBackendFormat();
                    }

                    // On this fallback path we only handle "external only"
                    // formats; a defined VkFormat should have been matched by
                    // one of the cases above.
                    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
                        return GrBackendFormat();
                    }

                    // Build a YCbCr conversion from the driver's suggested
                    // settings so Skia can sample the external format.
                    GrVkYcbcrConversionInfo ycbcrConversion;
                    ycbcrConversion.fYcbcrModel = hwbFormatProps.suggestedYcbcrModel;
                    ycbcrConversion.fYcbcrRange = hwbFormatProps.suggestedYcbcrRange;
                    ycbcrConversion.fXChromaOffset = hwbFormatProps.suggestedXChromaOffset;
                    ycbcrConversion.fYChromaOffset = hwbFormatProps.suggestedYChromaOffset;
                    ycbcrConversion.fForceExplicitReconstruction = VK_FALSE;
                    ycbcrConversion.fExternalFormat = hwbFormatProps.externalFormat;
                    ycbcrConversion.fExternalFormatFeatures = hwbFormatProps.formatFeatures;
                    // Use linear chroma filtering only if the format supports it.
                    if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
                        hwbFormatProps.formatFeatures) {
                        ycbcrConversion.fChromaFilter = VK_FILTER_LINEAR;
                    } else {
                        ycbcrConversion.fChromaFilter = VK_FILTER_NEAREST;
                    }

                    return GrBackendFormat::MakeVk(ycbcrConversion);
                }
            }
        }
#else
        return GrBackendFormat();
#endif
    }
    return GrBackendFormat();
}
159
160class GLCleanupHelper {
161public:
162 GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
163 : fTexID(texID)
164 , fImage(image)
165 , fDisplay(display) { }
166 ~GLCleanupHelper() {
167 glDeleteTextures(1, &fTexID);
168 // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
169 eglDestroyImageKHR(fDisplay, fImage);
170 }
171private:
172 GrGLuint fTexID;
173 EGLImageKHR fImage;
174 EGLDisplay fDisplay;
175};
176
177void delete_gl_texture(void* context) {
178 GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
179 delete cleanupHelper;
180}
181
// Imports the AHardwareBuffer into GL: wraps it in an EGLImage, binds the
// image to a newly generated texture (TEXTURE_2D when renderable, otherwise
// TEXTURE_EXTERNAL), and returns the result as a GrBackendTexture.
//
// On success, *deleteProc/*deleteCtx are set so the caller can later release
// the texture and EGLImage (the image holds a ref on the AHardwareBuffer).
// Returns an invalid GrBackendTexture on any EGL/GL failure, cleaning up
// whatever was created so far.
static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    while (GL_NO_ERROR != glGetError()) {} //clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    // The protected-content attributes are only included when requested;
    // otherwise the list is terminated early by EGL_NONE.
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError() );
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    // Only TEXTURE_2D textures can be rendered to; external textures are
    // sample-only.
    GrGLuint target = isRenderable ? GR_GL_TEXTURE_2D : GR_GL_TEXTURE_EXTERNAL;

    glBindTexture(target, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    // Attach the EGLImage as the texture's backing store.
    glEGLImageTargetTexture2DOES(target, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    // We changed the bound texture behind Ganesh's back; tell it to drop its
    // cached texture-binding state.
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = target;
    textureInfo.fFormat = *backendFormat.getGLFormat();

    *deleteProc = delete_gl_texture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}
243
244#ifdef SK_VULKAN
245class VulkanCleanupHelper {
246public:
247 VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
248 : fDevice(gpu->device())
249 , fImage(image)
250 , fMemory(memory)
251 , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
252 , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
253 ~VulkanCleanupHelper() {
254 fDestroyImage(fDevice, fImage, nullptr);
255 fFreeMemory(fDevice, fMemory, nullptr);
256 }
257private:
258 VkDevice fDevice;
259 VkImage fImage;
260 VkDeviceMemory fMemory;
261 PFN_vkDestroyImage fDestroyImage;
262 PFN_vkFreeMemory fFreeMemory;
263};
264
265void delete_vk_image(void* context) {
266 VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
267 delete cleanupHelper;
268}
269
// Imports the AHardwareBuffer into Vulkan: creates a VkImage with external-
// memory info, imports the buffer via a dedicated allocation, binds it, and
// returns the result as a GrBackendTexture.
//
// The backendFormat must carry a VkFormat (possibly VK_FORMAT_UNDEFINED with a
// valid YCbCr conversion for external-only formats) matching what the driver
// reports for this buffer. On success, *deleteProc/*deleteCtx are set so the
// caller can later destroy the image and free its memory. Returns an invalid
// GrBackendTexture on failure, releasing anything created so far.
static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height,
        DeleteImageProc* deleteProc,
        DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat,
        bool isRenderable) {
    SkASSERT(context->backend() == GrBackendApi::kVulkan);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->priv().getGpu());

    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    SkASSERT(gpu);

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    // Query the buffer's Vulkan properties (size, memory types, format info).
    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0;  // If this is zero it is as if we aren't using this struct.

    const GrVkYcbcrConversionInfo* ycbcrConversion = backendFormat.getVkYcbcrConversionInfo();
    if (!ycbcrConversion) {
        return GrBackendTexture();
    }

    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
        // TODO: We should not assume the transfer features here and instead should have a way for
        // Ganesh's tracking of internal images to report whether or not they support transfers.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(!ycbcrConversion->isValid());
    } else {
        SkASSERT(ycbcrConversion->isValid());
        // We have an external only format
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(format == VK_FORMAT_UNDEFINED);
        SkASSERT(hwbFormatProps.externalFormat == ycbcrConversion->fExternalFormat);
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    SkASSERT(format == hwbFormatProps.format);

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
            &externalFormat,                                                     // pNext
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
    // External-only (undefined) formats may only be sampled; defined formats
    // also get transfer usage, plus color-attachment when renderable.
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    if (format != VK_FORMAT_UNDEFINED) {
        usageFlags = usageFlags |
                     VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                     VK_IMAGE_USAGE_TRANSFER_DST_BIT;
        if (isRenderable) {
            usageFlags = usageFlags | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
        }
    }

    // TODO: Check the supported tilings vkGetPhysicalDeviceImageFormatProperties2 to see if we have
    // to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
            &externalMemoryImageInfo,                    // pNext
            0,                                           // VkImageCreateFlags
            VK_IMAGE_TYPE_2D,                            // VkImageType
            format,                                      // VkFormat
            { (uint32_t)width, (uint32_t)height, 1 },    // VkExtent3D
            1,                                           // mipLevels
            1,                                           // arrayLayers
            VK_SAMPLE_COUNT_1_BIT,                       // samples
            tiling,                                      // VkImageTiling
            usageFlags,                                  // VkImageUsageFlags
            VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
            0,                                           // queueFamilyCount
            0,                                           // pQueueFamilyIndices
            VK_IMAGE_LAYOUT_UNDEFINED,                   // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    // Find a device-local memory type the buffer can be imported into.
    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    // Import the AHardwareBuffer via a dedicated allocation tied to the image.
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
            &dedicatedAllocInfo,                         // pNext
            hwbProps.allocationSize,                     // allocationSize
            typeIndex,                                   // memoryTypeIndex
    };

    VkDeviceMemory memory;

    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;

    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not
    // support that extension. Or if we know the source of the AHardwareBuffer is not from a
    // "foreign" device we can leave them as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    imageInfo.fYcbcrConversionInfo = *ycbcrConversion;

    *deleteProc = delete_vk_image;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
463#endif
464
465static bool can_import_protected_content_eglimpl() {
466 EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
467 const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
468 size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR);
469 size_t extsLen = strlen(exts);
470 bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts);
471 bool atStart = !strncmp(PROT_CONTENT_EXT_STR " ", exts, cropExtLen+1);
472 bool atEnd = (cropExtLen+1) < extsLen
473 && !strcmp(" " PROT_CONTENT_EXT_STR,
474 exts + extsLen - (cropExtLen+1));
475 bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " ");
476 return equal || atStart || atEnd || inMiddle;
477}
478
479static bool can_import_protected_content(GrContext* context) {
480 if (GrBackendApi::kOpenGL == context->backend()) {
481 // Only compute whether the extension is present once the first time this
482 // function is called.
483 static bool hasIt = can_import_protected_content_eglimpl();
484 return hasIt;
485 }
486 return false;
487}
488
// Public entry point: imports an AHardwareBuffer as a GrBackendTexture for the
// context's backend API, dispatching to the GL or Vulkan implementation.
//
// isProtectedContent is honored only when the backend actually supports
// importing protected content (currently GL with EGL_EXT_protected_content).
// Returns an invalid GrBackendTexture if the context is abandoned or the
// backend path fails/is compiled out.
GrBackendTexture MakeBackendTexture(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                    int width, int height,
                                    DeleteImageProc* deleteProc,
                                    DeleteImageCtx* deleteCtx,
                                    bool isProtectedContent,
                                    const GrBackendFormat& backendFormat,
                                    bool isRenderable) {
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    bool createProtectedImage = isProtectedContent && can_import_protected_content(context);

    if (GrBackendApi::kOpenGL == context->backend()) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat,
                                       isRenderable);
    } else {
        SkASSERT(GrBackendApi::kVulkan == context->backend());
#ifdef SK_VULKAN
        // Currently we don't support protected images on vulkan
        SkASSERT(!createProtectedImage);
        return make_vk_backend_texture(context, hardwareBuffer, width, height, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat,
                                       isRenderable);
#else
        return GrBackendTexture();
#endif
    }
}
518
519} // GrAHardwareBufferUtils
520
521#endif
522