/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#include "SkTypes.h"


#if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26
#define GL_GLEXT_PROTOTYPES
#define EGL_EGLEXT_PROTOTYPES
#include "GrAHardwareBufferImageGenerator.h"

#include <android/hardware_buffer.h>

#include "GrBackendSurface.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrProxyProvider.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrResourceProviderPriv.h"
#include "GrTexture.h"
#include "GrTextureProxy.h"
#include "SkMessageBus.h"
#include "gl/GrGLDefines.h"
#include "gl/GrGLTypes.h"

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

#ifdef SK_VULKAN
#include "vk/GrVkExtensions.h"
#include "vk/GrVkGpu.h"
#endif

#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
#define EGL_PROTECTED_CONTENT_EXT 0x32C0

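// EGL reports its extensions as a single space-separated string, so this check looks for the
// EGL_EXT_protected_content token when it is the whole string, at the start, at the end, or in
// the middle of the extension list.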
static bool can_import_protected_content_eglimpl() {
    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
    size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR);
    size_t extsLen = strlen(exts);
    bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts);
    bool atStart = !strncmp(PROT_CONTENT_EXT_STR " ", exts, cropExtLen+1);
    bool atEnd = (cropExtLen+1) < extsLen
                 && !strcmp(" " PROT_CONTENT_EXT_STR,
                            exts + extsLen - (cropExtLen+1));
    bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " ");
    return equal || atStart || atEnd || inMiddle;
}

static bool can_import_protected_content(GrContext* context) {
    if (kOpenGL_GrBackend == context->contextPriv().getBackend()) {
        // Only compute whether the extension is present once, the first time this
        // function is called.
        static bool hasIt = can_import_protected_content_eglimpl();
        return hasIt;
    }
    return false;
}

std::unique_ptr<SkImageGenerator> GrAHardwareBufferImageGenerator::Make(
        AHardwareBuffer* graphicBuffer, SkAlphaType alphaType, sk_sp<SkColorSpace> colorSpace,
        GrSurfaceOrigin surfaceOrigin) {
    AHardwareBuffer_Desc bufferDesc;
    AHardwareBuffer_describe(graphicBuffer, &bufferDesc);
    SkColorType colorType;
    switch (bufferDesc.format) {
        case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            colorType = kRGBA_8888_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
            colorType = kRGBA_F16_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
            colorType = kRGB_565_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
        case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
            colorType = kRGB_888x_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
            colorType = kRGBA_1010102_SkColorType;
            break;
        default:
            // Given that we only use this texture as a source, colorType will not impact how Skia
            // uses the texture. The only anticipated effect is that, for some format types, if we
            // are not bound as an OES texture we may get invalid results for SKP capture if we
            // read back the texture.
            colorType = kRGBA_8888_SkColorType;
            break;
    }
    SkImageInfo info = SkImageInfo::Make(bufferDesc.width, bufferDesc.height, colorType,
                                         alphaType, std::move(colorSpace));
    bool createProtectedImage = 0 != (bufferDesc.usage & AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT);
    return std::unique_ptr<SkImageGenerator>(new GrAHardwareBufferImageGenerator(
            info, graphicBuffer, alphaType, createProtectedImage,
            bufferDesc.format, surfaceOrigin));
}
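
// Usage sketch: a client that already owns an AHardwareBuffer can wrap it without a copy roughly
// as follows. wrap_hardware_buffer is a hypothetical helper (not part of this file), and it
// assumes the generator is adopted via SkImage::MakeFromGenerator:
//
//     sk_sp<SkImage> wrap_hardware_buffer(AHardwareBuffer* buffer) {
//         std::unique_ptr<SkImageGenerator> gen = GrAHardwareBufferImageGenerator::Make(
//                 buffer, kPremul_SkAlphaType, nullptr, kTopLeft_GrSurfaceOrigin);
//         return gen ? SkImage::MakeFromGenerator(std::move(gen)) : nullptr;
//     }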

GrAHardwareBufferImageGenerator::GrAHardwareBufferImageGenerator(const SkImageInfo& info,
        AHardwareBuffer* hardwareBuffer, SkAlphaType alphaType, bool isProtectedContent,
        uint32_t bufferFormat, GrSurfaceOrigin surfaceOrigin)
    : INHERITED(info)
    , fHardwareBuffer(hardwareBuffer)
    , fBufferFormat(bufferFormat)
    , fIsProtectedContent(isProtectedContent)
    , fSurfaceOrigin(surfaceOrigin) {
    AHardwareBuffer_acquire(fHardwareBuffer);
}

GrAHardwareBufferImageGenerator::~GrAHardwareBufferImageGenerator() {
    AHardwareBuffer_release(fHardwareBuffer);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#ifdef SK_VULKAN

class VulkanCleanupHelper {
public:
    VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
        : fDevice(gpu->device())
        , fImage(image)
        , fMemory(memory)
        , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
        , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
    ~VulkanCleanupHelper() {
        fDestroyImage(fDevice, fImage, nullptr);
        fFreeMemory(fDevice, fMemory, nullptr);
    }
private:
    VkDevice fDevice;
    VkImage fImage;
    VkDeviceMemory fMemory;
    PFN_vkDestroyImage fDestroyImage;
    PFN_vkFreeMemory fFreeMemory;
};

void GrAHardwareBufferImageGenerator::DeleteVkImage(void* context) {
    VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
    delete cleanupHelper;
}

#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;

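// Imports the AHardwareBuffer into Vulkan: query the buffer's memory properties, create a VkImage
// that names VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, make a dedicated
// allocation that imports the buffer, bind it, and return the result as a GrBackendTexture. On
// any failure the partially created Vulkan objects are destroyed and an invalid GrBackendTexture
// is returned.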
static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    SkASSERT(context->contextPriv().getBackend() == kVulkan_GrBackend);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());

    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    SkASSERT(gpu);

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    SkASSERT(format == hwbFormatProps.format);
    SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
             SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
             SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        nullptr,                                                             // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    // TODO: Check the supported tilings via vkGetPhysicalDeviceImageFormatProperties2 to see if we
    // have to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        &externalMemoryImageInfo,                    // pNext
        0,                                           // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                            // VkImageType
        format,                                      // VkFormat
        { (uint32_t)width, (uint32_t)height, 1 },    // VkExtent3D
        1,                                           // mipLevels
        1,                                           // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                       // samples
        tiling,                                      // VkImageTiling
        usageFlags,                                  // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        0,                                           // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,                   // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkImageMemoryRequirementsInfo2 memReqsInfo;
    memReqsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
    memReqsInfo.pNext = nullptr;
    memReqsInfo.image = image;

    VkMemoryDedicatedRequirements dedicatedMemReqs;
    dedicatedMemReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
    dedicatedMemReqs.pNext = nullptr;

    VkMemoryRequirements2 memReqs;
    memReqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
    memReqs.pNext = &dedicatedMemReqs;

    VK_CALL(GetImageMemoryRequirements2(device, &memReqsInfo, &memReqs));
    SkASSERT(VK_TRUE == dedicatedMemReqs.requiresDedicatedAllocation);

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

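    // Find a device-local memory type that the hardware buffer's memoryTypeBits allows; the
    // import below must use one of those types and, per the dedicated requirements queried above,
    // a dedicated allocation.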
    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
        &dedicatedAllocInfo,                         // pNext
        hwbProps.allocationSize,                     // allocationSize
        typeIndex,                                   // memoryTypeIndex
    };

    VkDeviceMemory memory;

    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;

    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT, but current Adreno devices do not
    // support that extension. Alternatively, if we know the AHardwareBuffer did not come from a
    // "foreign" device, we can leave the queue family as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteVkImage;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
#endif

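// GL path: the AHardwareBuffer is imported by wrapping it in an EGLImage and binding that image
// to a GL_TEXTURE_EXTERNAL_OES texture. GLCleanupHelper keeps the texture and EGLImage alive
// until the wrapping GrTexture is released, at which point DeleteGLTexture tears both down.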
class GLCleanupHelper {
public:
    GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
        : fTexID(texID)
        , fImage(image)
        , fDisplay(display) { }
    ~GLCleanupHelper() {
        glDeleteTextures(1, &fTexID);
        // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
        eglDestroyImageKHR(fDisplay, fImage);
    }
private:
    GrGLuint fTexID;
    EGLImageKHR fImage;
    EGLDisplay fDisplay;
};

void GrAHardwareBufferImageGenerator::DeleteGLTexture(void* context) {
    GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
    delete cleanupHelper;
}

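// Creates an EGLImage from the hardware buffer (adding protected-content attributes when
// requested), binds it to a freshly generated GL_TEXTURE_EXTERNAL_OES texture via
// glEGLImageTargetTexture2DOES, and returns that texture wrapped in a GrBackendTexture. Any
// failure cleans up the texture and EGLImage and returns an invalid GrBackendTexture.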
static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    while (GL_NO_ERROR != glGetError()) {} // clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError());
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = *backendFormat.getGLTarget();
    textureInfo.fFormat = *backendFormat.getGLFormat();

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteGLTexture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}

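// Dispatches to the GL or Vulkan import path. Note that a protected-content request is silently
// dropped if the GL driver does not advertise EGL_EXT_protected_content, and protected images are
// not supported at all on the Vulkan path.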
static GrBackendTexture make_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    bool createProtectedImage = isProtectedContent && can_import_protected_content(context);

    if (kOpenGL_GrBackend == context->contextPriv().getBackend()) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat);
    } else {
        SkASSERT(kVulkan_GrBackend == context->contextPriv().getBackend());
#ifdef SK_VULKAN
        // Currently we don't support protected images on vulkan
        SkASSERT(!createProtectedImage);
        return make_vk_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat);
#else
        return GrBackendTexture();
#endif
    }
}

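// Maps an AHardwareBuffer format to the corresponding backend format. The GL path always targets
// GR_GL_TEXTURE_EXTERNAL because the buffer is bound through an EGLImage; unknown formats fall
// back to RGBA8 on GL and to R8G8B8 on Vulkan.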
GrBackendFormat get_backend_format(GrBackend backend, uint32_t bufferFormat) {
    if (backend == kOpenGL_GrBackend) {
        switch (bufferFormat) {
            // TODO: find out if we can detect which graphic buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
        }
    } else if (backend == kVulkan_GrBackend) {
        switch (bufferFormat) {
            // TODO: find out if we can detect which graphic buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
        }
    }
    return GrBackendFormat();
}

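// Builds a lazily instantiated texture proxy for the hardware buffer: the actual import happens
// in the callback passed to createLazyProxy, the first time the proxy is instantiated. The
// AHardwareBuffer is ref'd here so it stays alive until that callback runs, and the delete proc
// registered via setRelease tears down the imported GL/Vulkan objects when the wrapping GrTexture
// goes away.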
sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::makeProxy(GrContext* context) {
    if (context->abandoned()) {
        return nullptr;
    }

    GrPixelConfig pixelConfig;
    GrBackendFormat backendFormat = get_backend_format(context->contextPriv().getBackend(),
                                                       fBufferFormat);
    if (!context->contextPriv().caps()->getConfigFromBackendFormat(
            backendFormat, this->getInfo().colorType(), &pixelConfig)) {
        return nullptr;
    }

    int width = this->getInfo().width();
    int height = this->getInfo().height();

    GrSurfaceDesc desc;
    desc.fWidth = width;
    desc.fHeight = height;
    desc.fConfig = pixelConfig;

    GrTextureType textureType = GrTextureType::k2D;
    if (context->contextPriv().getBackend() == kOpenGL_GrBackend) {
        textureType = GrTextureType::kExternal;
    }

    auto proxyProvider = context->contextPriv().proxyProvider();

    AHardwareBuffer* hardwareBuffer = fHardwareBuffer;
    AHardwareBuffer_acquire(hardwareBuffer);

    const bool isProtectedContent = fIsProtectedContent;

    sk_sp<GrTextureProxy> texProxy = proxyProvider->createLazyProxy(
            [context, hardwareBuffer, width, height, pixelConfig, isProtectedContent, backendFormat]
            (GrResourceProvider* resourceProvider) {
                if (!resourceProvider) {
                    AHardwareBuffer_release(hardwareBuffer);
                    return sk_sp<GrTexture>();
                }

                DeleteImageProc deleteImageProc = nullptr;
                DeleteImageCtx deleteImageCtx = nullptr;

                GrBackendTexture backendTex = make_backend_texture(context, hardwareBuffer,
                                                                   width, height, pixelConfig,
                                                                   &deleteImageProc,
                                                                   &deleteImageCtx,
                                                                   isProtectedContent,
                                                                   backendFormat);
                if (!backendTex.isValid()) {
                    return sk_sp<GrTexture>();
                }
                SkASSERT(deleteImageProc && deleteImageCtx);

                backendTex.fConfig = pixelConfig;
                sk_sp<GrTexture> tex = resourceProvider->wrapBackendTexture(backendTex);
                if (!tex) {
                    deleteImageProc(deleteImageCtx);
                    return sk_sp<GrTexture>();
                }

                if (deleteImageProc) {
                    sk_sp<GrReleaseProcHelper> releaseProcHelper(
                            new GrReleaseProcHelper(deleteImageProc, deleteImageCtx));
                    tex->setRelease(releaseProcHelper);
                }

                return tex;
            },
            desc, fSurfaceOrigin, GrMipMapped::kNo, textureType, SkBackingFit::kExact,
            SkBudgeted::kNo);

    if (!texProxy) {
        AHardwareBuffer_release(hardwareBuffer);
    }
    return texProxy;
}

sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::onGenerateTexture(
        GrContext* context, const SkImageInfo& info, const SkIPoint& origin, bool willNeedMipMaps) {
    sk_sp<GrTextureProxy> texProxy = this->makeProxy(context);
    if (!texProxy) {
        return nullptr;
    }

    if (0 == origin.fX && 0 == origin.fY &&
        info.width() == this->getInfo().width() && info.height() == this->getInfo().height()) {
        // If the caller wants the full texture we're done. The caller will handle making a copy
        // for mip maps if that is required.
        return texProxy;
    }
    // Otherwise, make a copy for the requested subset.
    SkIRect subset = SkIRect::MakeXYWH(origin.fX, origin.fY, info.width(), info.height());

    GrMipMapped mipMapped = willNeedMipMaps ? GrMipMapped::kYes : GrMipMapped::kNo;

    return GrSurfaceProxy::Copy(context, texProxy.get(), mipMapped, subset, SkBudgeted::kYes);
}

bool GrAHardwareBufferImageGenerator::onIsValid(GrContext* context) const {
    if (nullptr == context) {
        return false; // The CPU backend is not supported because the hardware buffer may be swizzled.
    }
    return kOpenGL_GrBackend == context->contextPriv().getBackend() ||
           kVulkan_GrBackend == context->contextPriv().getBackend();
}

#endif // SK_BUILD_FOR_ANDROID && __ANDROID_API__ >= 26