/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkTypes.h"

#if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26
#define GL_GLEXT_PROTOTYPES
#define EGL_EGLEXT_PROTOTYPES

#include "GrAHardwareBufferImageGenerator.h"

#include <android/hardware_buffer.h>

#include "GrBackendSurface.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrProxyProvider.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrResourceProviderPriv.h"
#include "GrTexture.h"
#include "GrTextureProxy.h"
#include "SkMessageBus.h"
#include "gl/GrGLDefines.h"
#include "gl/GrGLTypes.h"

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

#ifdef SK_VULKAN
#include "vk/GrVkExtensions.h"
#include "vk/GrVkGpu.h"
#endif

#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
#define EGL_PROTECTED_CONTENT_EXT 0x32C0

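// Returns true if the current EGL display advertises EGL_EXT_protected_content. The extension
// string is a space-separated list, so the name is matched either as the entire string, at the
// start, at the end, or in the middle (surrounded by spaces) to avoid matching a longer
// extension name that merely contains it as a substring.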
static bool can_import_protected_content_eglimpl() {
    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
    size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR);
    size_t extsLen = strlen(exts);
    bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts);
    bool atStart = !strncmp(PROT_CONTENT_EXT_STR " ", exts, cropExtLen+1);
    bool atEnd = (cropExtLen+1) < extsLen
                 && !strcmp(" " PROT_CONTENT_EXT_STR,
                            exts + extsLen - (cropExtLen+1));
    bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " ");
    return equal || atStart || atEnd || inMiddle;
}

static bool can_import_protected_content(GrContext* context) {
    if (GrBackendApi::kOpenGL == context->contextPriv().getBackend()) {
        // Only compute whether the extension is present the first time this function is
        // called; the result is cached in the function-local static below.
        static bool hasIt = can_import_protected_content_eglimpl();
        return hasIt;
    }
    return false;
}

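// A minimal caller-side usage sketch (not part of this file; variable names are illustrative
// only): the generator produced by Make() is typically handed to SkImage::MakeFromGenerator,
// e.g.
//
//     sk_sp<SkImage> image = SkImage::MakeFromGenerator(
//             GrAHardwareBufferImageGenerator::Make(buffer, kPremul_SkAlphaType,
//                                                   /*colorSpace=*/nullptr,
//                                                   kTopLeft_GrSurfaceOrigin));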
std::unique_ptr<SkImageGenerator> GrAHardwareBufferImageGenerator::Make(
        AHardwareBuffer* graphicBuffer, SkAlphaType alphaType, sk_sp<SkColorSpace> colorSpace,
        GrSurfaceOrigin surfaceOrigin) {
    AHardwareBuffer_Desc bufferDesc;
    AHardwareBuffer_describe(graphicBuffer, &bufferDesc);
    SkColorType colorType;
    switch (bufferDesc.format) {
        case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
        case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
            colorType = kRGBA_8888_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
            colorType = kRGBA_F16_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
            colorType = kRGB_565_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
            colorType = kRGB_888x_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
            colorType = kRGBA_1010102_SkColorType;
            break;
        default:
            // Given that we only use this texture as a source, colorType will not impact how
            // Skia uses the texture. The only potential effect this is anticipated to have is
            // that, for some format types, if we are not bound as an OES texture we may get
            // invalid results for SKP capture if we read back the texture.
            colorType = kRGBA_8888_SkColorType;
            break;
    }
    SkImageInfo info = SkImageInfo::Make(bufferDesc.width, bufferDesc.height, colorType,
                                         alphaType, std::move(colorSpace));
    bool createProtectedImage = 0 != (bufferDesc.usage & AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT);
    return std::unique_ptr<SkImageGenerator>(new GrAHardwareBufferImageGenerator(
            info, graphicBuffer, alphaType, createProtectedImage,
            bufferDesc.format, surfaceOrigin));
}

GrAHardwareBufferImageGenerator::GrAHardwareBufferImageGenerator(const SkImageInfo& info,
        AHardwareBuffer* hardwareBuffer, SkAlphaType alphaType, bool isProtectedContent,
        uint32_t bufferFormat, GrSurfaceOrigin surfaceOrigin)
    : INHERITED(info)
    , fHardwareBuffer(hardwareBuffer)
    , fBufferFormat(bufferFormat)
    , fIsProtectedContent(isProtectedContent)
    , fSurfaceOrigin(surfaceOrigin) {
    AHardwareBuffer_acquire(fHardwareBuffer);
}

GrAHardwareBufferImageGenerator::~GrAHardwareBufferImageGenerator() {
    AHardwareBuffer_release(fHardwareBuffer);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#ifdef SK_VULKAN

class VulkanCleanupHelper {
public:
    VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
        : fDevice(gpu->device())
        , fImage(image)
        , fMemory(memory)
        , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
        , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
    ~VulkanCleanupHelper() {
        fDestroyImage(fDevice, fImage, nullptr);
        fFreeMemory(fDevice, fMemory, nullptr);
    }
private:
    VkDevice fDevice;
    VkImage fImage;
    VkDeviceMemory fMemory;
    PFN_vkDestroyImage fDestroyImage;
    PFN_vkFreeMemory fFreeMemory;
};

void GrAHardwareBufferImageGenerator::DeleteVkImage(void* context) {
    VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
    delete cleanupHelper;
}

#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;

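// Imports the AHardwareBuffer into Vulkan. The overall flow (following
// VK_ANDROID_external_memory_android_hardware_buffer) is: query the buffer's format and memory
// properties, create a VkImage that names the AHardwareBuffer handle type as its external
// memory type, pick a device-local memory type permitted by the buffer, import the buffer's
// memory as a dedicated allocation, bind it to the image, and package the result as a
// GrBackendTexture. The deleteProc/deleteCtx out-params let the caller destroy the image and
// free the memory once the wrapping GrTexture is released.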
static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    SkASSERT(context->contextPriv().getBackend() == GrBackendApi::kVulkan);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
    SkASSERT(gpu);

    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

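    // If the driver reports VK_FORMAT_UNDEFINED for the buffer, the image can only be sampled
    // through a VkSamplerYcbcrConversion keyed off the driver-specific externalFormat, which is
    // what the VkExternalFormatANDROID struct below communicates when it is chained into the
    // image create info with a non-zero externalFormat.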
    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0;  // If this is zero it is as if we aren't using this struct.

    const GrVkYcbcrConversionInfo* ycbcrConversion = backendFormat.getVkYcbcrConversionInfo();
    if (!ycbcrConversion) {
        return GrBackendTexture();
    }

    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
        // TODO: We should not assume the transfer features here and instead should have a way for
        // Ganesh's tracking of internal images to report whether or not they support transfers.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(!ycbcrConversion->isValid());
    } else {
        SkASSERT(ycbcrConversion->isValid());
        // We have an external-only format.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(format == VK_FORMAT_UNDEFINED);
        SkASSERT(hwbFormatProps.externalFormat == ycbcrConversion->fExternalFormat);
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    SkASSERT(format == hwbFormatProps.format);

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
            &externalFormat,                                                     // pNext
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    if (format != VK_FORMAT_UNDEFINED) {
        usageFlags = usageFlags |
                     VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                     VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    }

    // TODO: Check the supported tilings via vkGetPhysicalDeviceImageFormatProperties2 to see if
    // we have to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,      // sType
            &externalMemoryImageInfo,                 // pNext
            0,                                        // VkImageCreateFlags
            VK_IMAGE_TYPE_2D,                         // VkImageType
            format,                                   // VkFormat
            { (uint32_t)width, (uint32_t)height, 1 }, // VkExtent3D
            1,                                        // mipLevels
            1,                                        // arrayLayers
            VK_SAMPLE_COUNT_1_BIT,                    // samples
            tiling,                                   // VkImageTiling
            usageFlags,                               // VkImageUsageFlags
            VK_SHARING_MODE_EXCLUSIVE,                // VkSharingMode
            0,                                        // queueFamilyCount
            0,                                        // pQueueFamilyIndices
            VK_IMAGE_LAYOUT_UNDEFINED,                // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

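    // Pick a memory type: it must be one of the types the buffer reports as compatible in
    // hwbProps.memoryTypeBits, and we additionally require it to be device-local.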
    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

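    // Import the AHardwareBuffer's memory. The import info is chained into a dedicated
    // allocation (AHardwareBuffer image imports require one), which in turn is chained into
    // the allocate info so the resulting VkDeviceMemory is tied to the image created above.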
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
            &dedicatedAllocInfo,                     // pNext
            hwbProps.allocationSize,                 // allocationSize
            typeIndex,                               // memoryTypeIndex
    };

    VkDeviceMemory memory;

    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;

    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT, but current Adreno devices do
    // not support that extension. Alternatively, if we know the AHardwareBuffer did not come
    // from a "foreign" device, we can leave it as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    imageInfo.fYcbcrConversionInfo = *ycbcrConversion;

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteVkImage;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
#endif

class GLCleanupHelper {
public:
    GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
        : fTexID(texID)
        , fImage(image)
        , fDisplay(display) { }
    ~GLCleanupHelper() {
        glDeleteTextures(1, &fTexID);
        // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
        eglDestroyImageKHR(fDisplay, fImage);
    }
private:
    GrGLuint fTexID;
    EGLImageKHR fImage;
    EGLDisplay fDisplay;
};

void GrAHardwareBufferImageGenerator::DeleteGLTexture(void* context) {
    GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
    delete cleanupHelper;
}

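// Imports the AHardwareBuffer into GL: wrap the buffer in an EGLClientBuffer, create an
// EGLImageKHR from it (adding the protected-content attributes when requested), and bind that
// image to a freshly generated GL_TEXTURE_EXTERNAL_OES texture with
// glEGLImageTargetTexture2DOES. The resulting texture id and EGLImage are handed to a
// GLCleanupHelper via deleteProc/deleteCtx so they are destroyed with the wrapping GrTexture.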
static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    while (GL_NO_ERROR != glGetError()) {}  // clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError());
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = *backendFormat.getGLTarget();
    textureInfo.fFormat = *backendFormat.getGLFormat();

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteGLTexture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}

static GrBackendTexture make_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    bool createProtectedImage = isProtectedContent && can_import_protected_content(context);

    if (GrBackendApi::kOpenGL == context->contextPriv().getBackend()) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat);
    } else {
        SkASSERT(GrBackendApi::kVulkan == context->contextPriv().getBackend());
#ifdef SK_VULKAN
        // Currently we don't support protected images on Vulkan.
        SkASSERT(!createProtectedImage);
        return make_vk_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat);
#else
        return GrBackendTexture();
#endif
    }
}

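// Maps an AHardwareBuffer format to the GrBackendFormat used to create the proxy and texture.
// For GL, every supported format is expressed against GR_GL_TEXTURE_EXTERNAL since the buffer
// is imported via an EGLImage. For Vulkan, known formats map directly to VkFormats; anything
// else falls through to querying the driver, which yields an external-only format described by
// a GrVkYcbcrConversionInfo.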
GrBackendFormat get_backend_format(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                   GrBackendApi backend, uint32_t bufferFormat) {
    if (backend == GrBackendApi::kOpenGL) {
        switch (bufferFormat) {
            // TODO: find out if we can detect which graphics buffers support GR_GL_TEXTURE_2D.
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
        }
    } else if (backend == GrBackendApi::kVulkan) {
#ifdef SK_VULKAN
        switch (bufferFormat) {
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default: {
                GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
                SkASSERT(gpu);
                VkDevice device = gpu->device();

                if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
                    return GrBackendFormat();
                }
                VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
                hwbFormatProps.sType =
                        VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
                hwbFormatProps.pNext = nullptr;

                VkAndroidHardwareBufferPropertiesANDROID hwbProps;
                hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
                hwbProps.pNext = &hwbFormatProps;

                VkResult err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer,
                                                                          &hwbProps));
                if (VK_SUCCESS != err) {
                    return GrBackendFormat();
                }

                if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
                    return GrBackendFormat();
                }

                GrVkYcbcrConversionInfo ycbcrConversion;
                ycbcrConversion.fYcbcrModel = hwbFormatProps.suggestedYcbcrModel;
                ycbcrConversion.fYcbcrRange = hwbFormatProps.suggestedYcbcrRange;
                ycbcrConversion.fXChromaOffset = hwbFormatProps.suggestedXChromaOffset;
                ycbcrConversion.fYChromaOffset = hwbFormatProps.suggestedYChromaOffset;
                ycbcrConversion.fForceExplicitReconstruction = VK_FALSE;
                ycbcrConversion.fExternalFormat = hwbFormatProps.externalFormat;
                ycbcrConversion.fExternalFormatFeatures = hwbFormatProps.formatFeatures;
                if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
                    hwbFormatProps.formatFeatures) {
                    ycbcrConversion.fChromaFilter = VK_FILTER_LINEAR;
                } else {
                    ycbcrConversion.fChromaFilter = VK_FILTER_NEAREST;
                }

                return GrBackendFormat::MakeVk(ycbcrConversion);
            }
        }
#else
        return GrBackendFormat();
#endif
    }
    return GrBackendFormat();
}

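// Builds a lazily instantiated texture proxy for the AHardwareBuffer. The actual import (into
// either an EGLImage-backed GL texture or an externally allocated VkImage) is deferred to the
// lambda below, which runs when the proxy is instantiated; the imported backend texture is
// wrapped borrowed and read-only, and its release proc tears down the platform objects created
// during the import.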
sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::makeProxy(GrContext* context) {
    if (context->abandoned()) {
        return nullptr;
    }

    GrBackendFormat backendFormat = get_backend_format(context, fHardwareBuffer,
                                                       context->contextPriv().getBackend(),
                                                       fBufferFormat);
    GrPixelConfig pixelConfig = context->contextPriv().caps()->getConfigFromBackendFormat(
            backendFormat, this->getInfo().colorType());

    if (pixelConfig == kUnknown_GrPixelConfig) {
        return nullptr;
    }

    int width = this->getInfo().width();
    int height = this->getInfo().height();

    GrSurfaceDesc desc;
    desc.fWidth = width;
    desc.fHeight = height;
    desc.fConfig = pixelConfig;

    GrTextureType textureType = GrTextureType::k2D;
    if (context->contextPriv().getBackend() == GrBackendApi::kOpenGL) {
        textureType = GrTextureType::kExternal;
    } else if (context->contextPriv().getBackend() == GrBackendApi::kVulkan) {
        const VkFormat* format = backendFormat.getVkFormat();
        SkASSERT(format);
        if (*format == VK_FORMAT_UNDEFINED) {
            textureType = GrTextureType::kExternal;
        }
    }

    auto proxyProvider = context->contextPriv().proxyProvider();

    AHardwareBuffer* hardwareBuffer = fHardwareBuffer;
    AHardwareBuffer_acquire(hardwareBuffer);

    const bool isProtectedContent = fIsProtectedContent;

    sk_sp<GrTextureProxy> texProxy = proxyProvider->createLazyProxy(
            [context, hardwareBuffer, width, height, pixelConfig, isProtectedContent,
             backendFormat](GrResourceProvider* resourceProvider) {
                if (!resourceProvider) {
                    AHardwareBuffer_release(hardwareBuffer);
                    return sk_sp<GrTexture>();
                }

                DeleteImageProc deleteImageProc = nullptr;
                DeleteImageCtx deleteImageCtx = nullptr;

                GrBackendTexture backendTex = make_backend_texture(context, hardwareBuffer,
                                                                   width, height, pixelConfig,
                                                                   &deleteImageProc,
                                                                   &deleteImageCtx,
                                                                   isProtectedContent,
                                                                   backendFormat);
                if (!backendTex.isValid()) {
                    return sk_sp<GrTexture>();
                }
                SkASSERT(deleteImageProc && deleteImageCtx);

                backendTex.fConfig = pixelConfig;
                sk_sp<GrTexture> tex = resourceProvider->wrapBackendTexture(
                        backendTex, kBorrow_GrWrapOwnership, kRead_GrIOType);
                if (!tex) {
                    deleteImageProc(deleteImageCtx);
                    return sk_sp<GrTexture>();
                }

                if (deleteImageProc) {
                    sk_sp<GrReleaseProcHelper> releaseProcHelper(
                            new GrReleaseProcHelper(deleteImageProc, deleteImageCtx));
                    tex->setRelease(releaseProcHelper);
                }

                return tex;
            },
            backendFormat, desc, fSurfaceOrigin, GrMipMapped::kNo,
            GrInternalSurfaceFlags::kReadOnly, SkBackingFit::kExact, SkBudgeted::kNo);

    if (!texProxy) {
        AHardwareBuffer_release(hardwareBuffer);
    }
    return texProxy;
}

sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::onGenerateTexture(
        GrContext* context, const SkImageInfo& info, const SkIPoint& origin, bool willNeedMipMaps) {
    sk_sp<GrTextureProxy> texProxy = this->makeProxy(context);
    if (!texProxy) {
        return nullptr;
    }

    if (0 == origin.fX && 0 == origin.fY &&
        info.width() == this->getInfo().width() && info.height() == this->getInfo().height()) {
        // If the caller wants the full texture we're done. The caller will handle making a copy
        // for mip maps if that is required.
        return texProxy;
    }
    // Otherwise, make a copy for the requested subset.
    SkIRect subset = SkIRect::MakeXYWH(origin.fX, origin.fY, info.width(), info.height());

    GrMipMapped mipMapped = willNeedMipMaps ? GrMipMapped::kYes : GrMipMapped::kNo;

    return GrSurfaceProxy::Copy(context, texProxy.get(), mipMapped, subset, SkBackingFit::kExact,
                                SkBudgeted::kYes);
}

bool GrAHardwareBufferImageGenerator::onIsValid(GrContext* context) const {
    if (nullptr == context) {
        // The CPU backend is not supported because the hardware buffer can be swizzled.
        return false;
    }
    return GrBackendApi::kOpenGL == context->contextPriv().getBackend() ||
           GrBackendApi::kVulkan == context->contextPriv().getBackend();
}

#endif  // defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26