Add Vulkan support to GrAHardwareBufferImageGenerator
Bug: skia:
Change-Id: I37b08f336919fdac77fcd2726f129425bf8f4f2b
Reviewed-on: https://skia-review.googlesource.com/150132
Reviewed-by: Stan Iliev <stani@google.com>
Reviewed-by: Jim Van Verth <jvanverth@google.com>
Commit-Queue: Greg Daniel <egdaniel@google.com>
diff --git a/src/gpu/GrAHardwareBufferImageGenerator.cpp b/src/gpu/GrAHardwareBufferImageGenerator.cpp
index 359aa38..06b4131 100644
--- a/src/gpu/GrAHardwareBufferImageGenerator.cpp
+++ b/src/gpu/GrAHardwareBufferImageGenerator.cpp
@@ -20,6 +20,7 @@
#include "GrProxyProvider.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
+#include "GrResourceProviderPriv.h"
#include "GrTexture.h"
#include "GrTextureProxy.h"
#include "SkMessageBus.h"
@@ -31,6 +32,11 @@
#include <GLES/gl.h>
#include <GLES/glext.h>
+#ifdef SK_VULKAN
+#include "vk/GrVkExtensions.h"
+#include "vk/GrVkGpu.h"
+#endif
+
#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
#define EGL_PROTECTED_CONTENT_EXT 0x32C0
@@ -111,6 +117,216 @@
///////////////////////////////////////////////////////////////////////////////////////////////////
+#ifdef SK_VULKAN
+
+class VulkanCleanupHelper {
+public:
+ VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
+ : fDevice(gpu->device())
+ , fImage(image)
+ , fMemory(memory)
+ , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
+ , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
+ ~VulkanCleanupHelper() {
+ fDestroyImage(fDevice, fImage, nullptr);
+ fFreeMemory(fDevice, fMemory, nullptr);
+ }
+private:
+ VkDevice fDevice;
+ VkImage fImage;
+ VkDeviceMemory fMemory;
+ PFN_vkDestroyImage fDestroyImage;
+ PFN_vkFreeMemory fFreeMemory;
+};
+
+void GrAHardwareBufferImageGenerator::DeleteVkImage(void* context) {
+ VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
+ delete cleanupHelper;
+}
+
+#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;
+
+static GrBackendTexture make_vk_backend_texture(
+ GrContext* context, AHardwareBuffer* hardwareBuffer,
+ int width, int height, GrPixelConfig config,
+ GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
+ GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
+ bool isProtectedContent,
+ const GrBackendFormat& backendFormat) {
+ SkASSERT(context->contextPriv().getBackend() == kVulkan_GrBackend);
+ GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
+
+ VkPhysicalDevice physicalDevice = gpu->physicalDevice();
+ VkDevice device = gpu->device();
+
+ SkASSERT(gpu);
+
+ if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
+ return GrBackendTexture();
+ }
+
+ SkASSERT(backendFormat.getVkFormat());
+ VkFormat format = *backendFormat.getVkFormat();
+
+ VkResult err;
+
+ VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
+ hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+ hwbFormatProps.pNext = nullptr;
+
+ VkAndroidHardwareBufferPropertiesANDROID hwbProps;
+ hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+ hwbProps.pNext = &hwbFormatProps;
+
+ err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
+ if (VK_SUCCESS != err) {
+ return GrBackendTexture();
+ }
+
+ SkASSERT(format == hwbFormatProps.format);
+ SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
+ SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
+ SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
+
+ const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
+ VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType
+ nullptr, // pNext
+ VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
+ };
+ VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+ VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+ // TODO: Check the supported tilings vkGetPhysicalDeviceImageFormatProperties2 to see if we have
+ // to use linear. Add better linear support throughout Ganesh.
+ VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
+
+ const VkImageCreateInfo imageCreateInfo = {
+ VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
+ &externalMemoryImageInfo, // pNext
+ 0, // VkImageCreateFlags
+ VK_IMAGE_TYPE_2D, // VkImageType
+ format, // VkFormat
+ { (uint32_t)width, (uint32_t)height, 1 }, // VkExtent3D
+ 1, // mipLevels
+ 1, // arrayLayers
+ VK_SAMPLE_COUNT_1_BIT, // samples
+ tiling, // VkImageTiling
+ usageFlags, // VkImageUsageFlags
+ VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode
+ 0, // queueFamilyCount
+ 0, // pQueueFamilyIndices
+ VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout
+ };
+
+ VkImage image;
+ err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
+ if (VK_SUCCESS != err) {
+ return GrBackendTexture();
+ }
+
+ VkImageMemoryRequirementsInfo2 memReqsInfo;
+ memReqsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
+ memReqsInfo.pNext = nullptr;
+ memReqsInfo.image = image;
+
+ VkMemoryDedicatedRequirements dedicatedMemReqs;
+ dedicatedMemReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
+ dedicatedMemReqs.pNext = nullptr;
+
+ VkMemoryRequirements2 memReqs;
+ memReqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
+ memReqs.pNext = &dedicatedMemReqs;
+
+ VK_CALL(GetImageMemoryRequirements2(device, &memReqsInfo, &memReqs));
+ SkASSERT(VK_TRUE == dedicatedMemReqs.requiresDedicatedAllocation);
+
+ VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
+ phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
+ phyDevMemProps.pNext = nullptr;
+
+ uint32_t typeIndex = 0;
+ uint32_t heapIndex = 0;
+ bool foundHeap = false;
+ VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
+ uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
+ for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
+ if (hwbProps.memoryTypeBits & (1 << i)) {
+ const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
+ uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
+ VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
+ typeIndex = i;
+ heapIndex = pdmp.memoryTypes[i].heapIndex;
+ foundHeap = true;
+ }
+ }
+ }
+ if (!foundHeap) {
+ VK_CALL(DestroyImage(device, image, nullptr));
+ return GrBackendTexture();
+ }
+
+ VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
+ hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+ hwbImportInfo.pNext = nullptr;
+ hwbImportInfo.buffer = hardwareBuffer;
+
+ VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
+ dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
+ dedicatedAllocInfo.pNext = &hwbImportInfo;
+ dedicatedAllocInfo.image = image;
+ dedicatedAllocInfo.buffer = VK_NULL_HANDLE;
+
+ VkMemoryAllocateInfo allocInfo = {
+ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
+ &dedicatedAllocInfo, // pNext
+ hwbProps.allocationSize, // allocationSize
+ typeIndex, // memoryTypeIndex
+ };
+
+ VkDeviceMemory memory;
+
+ err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
+ if (VK_SUCCESS != err) {
+ VK_CALL(DestroyImage(device, image, nullptr));
+ return GrBackendTexture();
+ }
+
+ VkBindImageMemoryInfo bindImageInfo;
+ bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+ bindImageInfo.pNext = nullptr;
+ bindImageInfo.image = image;
+ bindImageInfo.memory = memory;
+ bindImageInfo.memoryOffset = 0;
+
+ err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
+ if (VK_SUCCESS != err) {
+ VK_CALL(DestroyImage(device, image, nullptr));
+ VK_CALL(FreeMemory(device, memory, nullptr));
+ return GrBackendTexture();
+ }
+
+ GrVkImageInfo imageInfo;
+
+ imageInfo.fImage = image;
+ imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
+ imageInfo.fImageTiling = tiling;
+ imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ imageInfo.fFormat = format;
+ imageInfo.fLevelCount = 1;
+ // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not
+ // support that extension. Or if we know the source of the AHardwareBuffer is not from a
+ // "foreign" device we can leave them as external.
+ imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
+
+ *deleteProc = GrAHardwareBufferImageGenerator::DeleteVkImage;
+ *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);
+
+ return GrBackendTexture(width, height, imageInfo);
+}
+#endif
+
class GLCleanupHelper {
public:
GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
@@ -128,7 +344,6 @@
EGLDisplay fDisplay;
};
-
void GrAHardwareBufferImageGenerator::DeleteGLTexture(void* context) {
GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
delete cleanupHelper;
@@ -199,13 +414,25 @@
GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
bool isProtectedContent,
const GrBackendFormat& backendFormat) {
- if (context->abandoned() || kOpenGL_GrBackend != context->contextPriv().getBackend()) {
- // Check if GrContext is not abandoned and the backend is GL.
+ if (context->abandoned()) {
return GrBackendTexture();
}
bool createProtectedImage = isProtectedContent && can_import_protected_content(context);
- return make_gl_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
- deleteCtx, createProtectedImage, backendFormat);
+
+ if (kOpenGL_GrBackend == context->contextPriv().getBackend()) {
+ return make_gl_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
+ deleteCtx, createProtectedImage, backendFormat);
+ } else {
+ SkASSERT(kVulkan_GrBackend == context->contextPriv().getBackend());
+#ifdef SK_VULKAN
+ // Currently we don't support protected images on vulkan
+ SkASSERT(!createProtectedImage);
+ return make_vk_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
+ deleteCtx, createProtectedImage, backendFormat);
+#else
+ return GrBackendTexture();
+#endif
+ }
}
GrBackendFormat get_backend_format(GrBackend backend, uint32_t bufferFormat) {
@@ -226,13 +453,30 @@
default:
return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
}
+ } else if (backend == kVulkan_GrBackend) {
+ switch (bufferFormat) {
+            // TODO: find out if we can detect which graphics buffers support GR_GL_TEXTURE_2D
+ case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
+ return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
+ case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
+ return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
+ case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
+ return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
+ case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
+ return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
+ case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
+ return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
+ case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
+ return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
+ default:
+ return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
+ }
}
return GrBackendFormat();
}
sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::makeProxy(GrContext* context) {
- if (context->abandoned() || kOpenGL_GrBackend != context->contextPriv().getBackend()) {
- // Check if GrContext is not abandoned and the backend is GL.
+ if (context->abandoned()) {
return nullptr;
}
@@ -293,9 +537,11 @@
return sk_sp<GrTexture>();
}
- sk_sp<GrReleaseProcHelper> releaseProcHelper(
- new GrReleaseProcHelper(deleteImageProc, deleteImageCtx));
- tex->setRelease(releaseProcHelper);
+ if (deleteImageProc) {
+ sk_sp<GrReleaseProcHelper> releaseProcHelper(
+ new GrReleaseProcHelper(deleteImageProc, deleteImageCtx));
+ tex->setRelease(releaseProcHelper);
+ }
return tex;
},
@@ -333,8 +579,8 @@
if (nullptr == context) {
return false; //CPU backend is not supported, because hardware buffer can be swizzled
}
- // TODO: add Vulkan support
- return kOpenGL_GrBackend == context->contextPriv().getBackend();
+ return kOpenGL_GrBackend == context->contextPriv().getBackend() ||
+ kVulkan_GrBackend == context->contextPriv().getBackend();
}
#endif //SK_BUILD_FOR_ANDROID_FRAMEWORK
diff --git a/src/gpu/GrAHardwareBufferImageGenerator.h b/src/gpu/GrAHardwareBufferImageGenerator.h
index f62741d..a5d7033 100644
--- a/src/gpu/GrAHardwareBufferImageGenerator.h
+++ b/src/gpu/GrAHardwareBufferImageGenerator.h
@@ -40,6 +40,10 @@
static void DeleteGLTexture(void* ctx);
+#ifdef SK_VULKAN
+ static void DeleteVkImage(void* ctx);
+#endif
+
protected:
bool onIsValid(GrContext*) const override;
diff --git a/src/gpu/vk/GrVkCaps.cpp b/src/gpu/vk/GrVkCaps.cpp
index 75e9639..4445a3a 100644
--- a/src/gpu/vk/GrVkCaps.cpp
+++ b/src/gpu/vk/GrVkCaps.cpp
@@ -214,6 +214,11 @@
}
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
+ extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
+ fSupportsBindMemory2 = true;
+ }
+
+ if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
fSupportsMaintenance1 = true;
}
@@ -229,7 +234,7 @@
}
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
- (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 3) &&
+ (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
this->supportsMemoryRequirements2())) {
fSupportsDedicatedAllocation = true;
}
@@ -243,11 +248,15 @@
}
#ifdef SK_BUILD_FOR_ANDROID
+    // Currently Adreno devices do not support the QUEUE_FAMILY_FOREIGN extension, so until they
+    // do we don't explicitly require it here even though the spec says it is required.
if (extensions.hasExtension(
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 3) &&
- extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&
- this->supportsExternalMemory()) {
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
+ /* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
+ this->supportsExternalMemory() &&
+ this->supportsBindMemory2()) {
fSupportsAndroidHWBExternalMemory = true;
+ fSupportsAHardwareBufferImages = true;
}
#endif
diff --git a/src/gpu/vk/GrVkCaps.h b/src/gpu/vk/GrVkCaps.h
index f46263c..aed6f7b 100644
--- a/src/gpu/vk/GrVkCaps.h
+++ b/src/gpu/vk/GrVkCaps.h
@@ -109,6 +109,10 @@
bool supportsPhysicalDeviceProperties2() const { return fSupportsPhysicalDeviceProperties2; }
// Returns whether the device supports the ability to extend VkMemoryRequirements struct.
bool supportsMemoryRequirements2() const { return fSupportsMemoryRequirements2; }
+
+    // Returns whether the device supports the ability to extend the vkBindBufferMemory and
+    // vkBindImageMemory calls (VK_KHR_bind_memory2 / Vulkan 1.1).
+ bool supportsBindMemory2() const { return fSupportsBindMemory2; }
+
// Returns whether or not the device suports the various API maintenance fixes to Vulkan 1.0. In
// Vulkan 1.1 all these maintenance are part of the core spec.
bool supportsMaintenance1() const { return fSupportsMaintenance1; }
@@ -219,6 +223,7 @@
bool fSupportsPhysicalDeviceProperties2 = false;
bool fSupportsMemoryRequirements2 = false;
+ bool fSupportsBindMemory2 = false;
bool fSupportsMaintenance1 = false;
bool fSupportsMaintenance2 = false;
bool fSupportsMaintenance3 = false;
diff --git a/src/gpu/vk/GrVkGpu.cpp b/src/gpu/vk/GrVkGpu.cpp
index e8d62af..d68220b 100644
--- a/src/gpu/vk/GrVkGpu.cpp
+++ b/src/gpu/vk/GrVkGpu.cpp
@@ -118,6 +118,7 @@
, fInterface(std::move(interface))
, fMemoryAllocator(backendContext.fMemoryAllocator)
, fInstance(backendContext.fInstance)
+ , fPhysicalDevice(backendContext.fPhysicalDevice)
, fDevice(backendContext.fDevice)
, fQueue(backendContext.fQueue)
, fQueueIndex(backendContext.fGraphicsQueueIndex)
diff --git a/src/gpu/vk/GrVkGpu.h b/src/gpu/vk/GrVkGpu.h
index 18d47be..dc98ca7 100644
--- a/src/gpu/vk/GrVkGpu.h
+++ b/src/gpu/vk/GrVkGpu.h
@@ -51,6 +51,7 @@
GrVkMemoryAllocator* memoryAllocator() const { return fMemoryAllocator.get(); }
+ VkPhysicalDevice physicalDevice() const { return fPhysicalDevice; }
VkDevice device() const { return fDevice; }
VkQueue queue() const { return fQueue; }
uint32_t queueIndex() const { return fQueueIndex; }
@@ -233,6 +234,7 @@
sk_sp<GrVkCaps> fVkCaps;
VkInstance fInstance;
+ VkPhysicalDevice fPhysicalDevice;
VkDevice fDevice;
VkQueue fQueue; // Must be Graphics queue
uint32_t fQueueIndex;
diff --git a/src/gpu/vk/GrVkImage.cpp b/src/gpu/vk/GrVkImage.cpp
index a2e4495..db2a46a 100644
--- a/src/gpu/vk/GrVkImage.cpp
+++ b/src/gpu/vk/GrVkImage.cpp
@@ -216,7 +216,7 @@
void GrVkImage::releaseImage(const GrVkGpu* gpu) {
if (fInfo.fCurrentQueueFamily != fInitialQueueFamily) {
- this->setImageLayout(gpu, fInfo.fImageLayout, 0, 0, false, true);
+ this->setImageLayout(gpu, this->currentLayout(), 0, 0, false, true);
}
if (fResource) {
fResource->unref(gpu);
diff --git a/src/gpu/vk/GrVkInterface.cpp b/src/gpu/vk/GrVkInterface.cpp
index dfce529..1d0232f 100644
--- a/src/gpu/vk/GrVkInterface.cpp
+++ b/src/gpu/vk/GrVkInterface.cpp
@@ -198,6 +198,15 @@
ACQUIRE_PROC_SUFFIX(GetImageSparseMemoryRequirements2, KHR, VK_NULL_HANDLE, device);
}
+ // Functions for VK_KHR_bind_memory2
+ if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
+ ACQUIRE_PROC(BindBufferMemory2, VK_NULL_HANDLE, device);
+ ACQUIRE_PROC(BindImageMemory2, VK_NULL_HANDLE, device);
+ } else if (extensions->hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
+ ACQUIRE_PROC_SUFFIX(BindBufferMemory2, KHR, VK_NULL_HANDLE, device);
+ ACQUIRE_PROC_SUFFIX(BindImageMemory2, KHR, VK_NULL_HANDLE, device);
+ }
+
// Functions for VK_KHR_maintenance1 or vulkan 1.1
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
ACQUIRE_PROC(TrimCommandPool, VK_NULL_HANDLE, device);
@@ -223,7 +232,7 @@
#ifdef SK_BUILD_FOR_ANDROID
// Functions for VK_ANDROID_external_memory_android_hardware_buffer
if (extensions->hasExtension(
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 3)) {
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2)) {
ACQUIRE_PROC_SUFFIX(GetAndroidHardwareBufferProperties, ANDROID, VK_NULL_HANDLE, device);
ACQUIRE_PROC_SUFFIX(GetMemoryAndroidHardwareBuffer, ANDROID, VK_NULL_HANDLE, device);
}
@@ -406,6 +415,15 @@
}
}
+ // Functions for VK_KHR_bind_memory2
+ if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
+ extensions->hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
+ if (nullptr == fFunctions.fBindBufferMemory2 ||
+ nullptr == fFunctions.fBindImageMemory2) {
+ RETURN_FALSE_INTERFACE
+ }
+ }
+
// Functions for VK_KHR_maintenance1 or vulkan 1.1
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
extensions->hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
@@ -433,7 +451,7 @@
#ifdef SK_BUILD_FOR_ANDROID
// Functions for VK_ANDROID_external_memory_android_hardware_buffer
if (extensions->hasExtension(
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 3)) {
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2)) {
if (nullptr == fFunctions.fGetAndroidHardwareBufferProperties ||
nullptr == fFunctions.fGetMemoryAndroidHardwareBuffer) {
RETURN_FALSE_INTERFACE
diff --git a/src/gpu/vk/GrVkInterface.h b/src/gpu/vk/GrVkInterface.h
index d7ec5bb..d1de6ed 100644
--- a/src/gpu/vk/GrVkInterface.h
+++ b/src/gpu/vk/GrVkInterface.h
@@ -206,6 +206,10 @@
VkPtr<PFN_vkGetBufferMemoryRequirements2> fGetBufferMemoryRequirements2;
VkPtr<PFN_vkGetImageSparseMemoryRequirements2> fGetImageSparseMemoryRequirements2;
+    // Functions for VK_KHR_bind_memory2
+ VkPtr<PFN_vkBindBufferMemory2> fBindBufferMemory2;
+ VkPtr<PFN_vkBindImageMemory2> fBindImageMemory2;
+
// Functions for VK_KHR_maintenance1 or vulkan 1.1
VkPtr<PFN_vkTrimCommandPool> fTrimCommandPool;
diff --git a/tests/GrAHardwareBufferTest.cpp b/tests/GrAHardwareBufferTest.cpp
index 2c698cb..0feaacb 100644
--- a/tests/GrAHardwareBufferTest.cpp
+++ b/tests/GrAHardwareBufferTest.cpp
@@ -82,7 +82,10 @@
ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
x, y, srcPixel, dstPixel);
result = false;
- }
+ }/* else {
+ SkDebugf("Got good pixel (%d, %d) value 0x%08x, got 0x%08x.\n",
+ x, y, srcPixel, dstPixel);
+ }*/
}
}
return result;