blob: 60c0be1ddc05c08e21ec95f04cfab0822ea400c6 [file] [log] [blame]
/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "tools/gpu/vk/VkYcbcrSamplerHelper.h"
9
10#ifdef SK_VULKAN
11
Robert Phillips057c33f2020-07-17 11:59:01 -040012#include "include/gpu/GrDirectContext.h"
Robert Phillips85aa4282020-06-11 10:54:43 -040013#include "src/gpu/GrContextPriv.h"
14#include "src/gpu/vk/GrVkGpu.h"
15#include "src/gpu/vk/GrVkUtil.h"
Robert Phillipsae413d82020-06-10 11:04:51 -040016
17int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
18 return 16 + (x + y) * 219 / (width + height - 2);
19}
20
21std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
22 return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
23}
24
Robert Phillips85aa4282020-06-11 10:54:43 -040025GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
Robert Phillips057c33f2020-07-17 11:59:01 -040026 return (GrVkGpu*) fDContext->priv().getGpu();
Robert Phillips85aa4282020-06-11 10:54:43 -040027}
Robert Phillipsae413d82020-06-10 11:04:51 -040028
Robert Phillips057c33f2020-07-17 11:59:01 -040029VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
30 SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
Robert Phillips85aa4282020-06-11 10:54:43 -040031}
Robert Phillipsae413d82020-06-10 11:04:51 -040032
33VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
Robert Phillips85aa4282020-06-11 10:54:43 -040034 GrVkGpu* vkGpu = this->vkGpu();
Robert Phillipsae413d82020-06-10 11:04:51 -040035
36 if (fImage != VK_NULL_HANDLE) {
Robert Phillips85aa4282020-06-11 10:54:43 -040037 GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
Robert Phillipsae413d82020-06-10 11:04:51 -040038 fImage = VK_NULL_HANDLE;
39 }
40 if (fImageMemory != VK_NULL_HANDLE) {
Robert Phillips85aa4282020-06-11 10:54:43 -040041 GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
Robert Phillipsae413d82020-06-10 11:04:51 -040042 fImageMemory = VK_NULL_HANDLE;
43 }
Robert Phillipsae413d82020-06-10 11:04:51 -040044}
45
Robert Phillips85aa4282020-06-11 10:54:43 -040046bool VkYcbcrSamplerHelper::isYCbCrSupported() {
47 GrVkGpu* vkGpu = this->vkGpu();
Robert Phillipsae413d82020-06-10 11:04:51 -040048
Robert Phillips85aa4282020-06-11 10:54:43 -040049 return vkGpu->vkCaps().supportsYcbcrConversion();
Robert Phillipsae413d82020-06-10 11:04:51 -040050}
51
Robert Phillips5f0cda42020-06-15 14:26:58 -040052bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
Robert Phillips85aa4282020-06-11 10:54:43 -040053 GrVkGpu* vkGpu = this->vkGpu();
54 VkResult result;
55
Robert Phillipsae413d82020-06-10 11:04:51 -040056 // Verify that the image format is supported.
57 VkFormatProperties formatProperties;
Robert Phillips85aa4282020-06-11 10:54:43 -040058 GR_VK_CALL(vkGpu->vkInterface(),
59 GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
60 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
61 &formatProperties));
Robert Phillipsae413d82020-06-10 11:04:51 -040062 if (!(formatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
63 // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
Robert Phillips5f0cda42020-06-15 14:26:58 -040064 return false;
Robert Phillipsae413d82020-06-10 11:04:51 -040065 }
66
67 // Create YCbCr image.
68 VkImageCreateInfo vkImageInfo = {};
69 vkImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
70 vkImageInfo.imageType = VK_IMAGE_TYPE_2D;
71 vkImageInfo.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
72 vkImageInfo.extent = VkExtent3D{width, height, 1};
73 vkImageInfo.mipLevels = 1;
74 vkImageInfo.arrayLayers = 1;
75 vkImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
76 vkImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
77 vkImageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
78 vkImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
79 vkImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
80
81 SkASSERT(fImage == VK_NULL_HANDLE);
Robert Phillips85aa4282020-06-11 10:54:43 -040082 GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
83 if (result != VK_SUCCESS) {
Robert Phillips5f0cda42020-06-15 14:26:58 -040084 return false;
Robert Phillipsae413d82020-06-10 11:04:51 -040085 }
86
87 VkMemoryRequirements requirements;
Robert Phillips85aa4282020-06-11 10:54:43 -040088 GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
89 fImage,
90 &requirements));
Robert Phillipsae413d82020-06-10 11:04:51 -040091
92 uint32_t memoryTypeIndex = 0;
93 bool foundHeap = false;
94 VkPhysicalDeviceMemoryProperties phyDevMemProps;
Robert Phillips85aa4282020-06-11 10:54:43 -040095 GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
96 &phyDevMemProps));
Robert Phillipsae413d82020-06-10 11:04:51 -040097 for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount && !foundHeap; ++i) {
98 if (requirements.memoryTypeBits & (1 << i)) {
99 // Map host-visible memory.
100 if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
101 memoryTypeIndex = i;
102 foundHeap = true;
103 }
104 }
105 }
106 if (!foundHeap) {
Robert Phillips5f0cda42020-06-15 14:26:58 -0400107 return false;
Robert Phillipsae413d82020-06-10 11:04:51 -0400108 }
109
110 VkMemoryAllocateInfo allocInfo = {};
111 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
112 allocInfo.allocationSize = requirements.size;
113 allocInfo.memoryTypeIndex = memoryTypeIndex;
114
115 SkASSERT(fImageMemory == VK_NULL_HANDLE);
Robert Phillips85aa4282020-06-11 10:54:43 -0400116 GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
117 nullptr, &fImageMemory));
118 if (result != VK_SUCCESS) {
Robert Phillips5f0cda42020-06-15 14:26:58 -0400119 return false;
Robert Phillipsae413d82020-06-10 11:04:51 -0400120 }
121
122 void* mappedBuffer;
Robert Phillips85aa4282020-06-11 10:54:43 -0400123 GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
124 requirements.size, 0u, &mappedBuffer));
125 if (result != VK_SUCCESS) {
Robert Phillips5f0cda42020-06-15 14:26:58 -0400126 return false;
Robert Phillipsae413d82020-06-10 11:04:51 -0400127 }
128
129 // Write Y channel.
130 VkImageSubresource subresource;
131 subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
132 subresource.mipLevel = 0;
133 subresource.arrayLayer = 0;
134
135 VkSubresourceLayout yLayout;
Robert Phillips85aa4282020-06-11 10:54:43 -0400136 GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
137 &subresource, &yLayout));
Robert Phillipsae413d82020-06-10 11:04:51 -0400138 uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
139 for (size_t y = 0; y < height; ++y) {
140 for (size_t x = 0; x < width; ++x) {
141 bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
142 }
143 }
144
145 // Write UV channels.
146 subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
147 VkSubresourceLayout uvLayout;
Robert Phillips85aa4282020-06-11 10:54:43 -0400148 GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
149 &subresource, &uvLayout));
Robert Phillipsae413d82020-06-10 11:04:51 -0400150 bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
151 for (size_t y = 0; y < height / 2; ++y) {
152 for (size_t x = 0; x < width / 2; ++x) {
153 auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
154 bufferData[y * uvLayout.rowPitch + x * 2] = u;
155 bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
156 }
157 }
158
159 VkMappedMemoryRange flushRange;
160 flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
161 flushRange.pNext = nullptr;
162 flushRange.memory = fImageMemory;
163 flushRange.offset = 0;
164 flushRange.size = VK_WHOLE_SIZE;
Robert Phillips85aa4282020-06-11 10:54:43 -0400165 GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
166 if (result != VK_SUCCESS) {
Robert Phillips5f0cda42020-06-15 14:26:58 -0400167 return false;
Robert Phillipsae413d82020-06-10 11:04:51 -0400168 }
Robert Phillips85aa4282020-06-11 10:54:43 -0400169 GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));
Robert Phillipsae413d82020-06-10 11:04:51 -0400170
171 // Bind image memory.
Robert Phillips85aa4282020-06-11 10:54:43 -0400172 GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
173 if (result != VK_SUCCESS) {
Robert Phillips5f0cda42020-06-15 14:26:58 -0400174 return false;
Robert Phillipsae413d82020-06-10 11:04:51 -0400175 }
176
177 // Wrap the image into SkImage.
178 GrVkYcbcrConversionInfo ycbcrInfo(vkImageInfo.format,
179 /*externalFormat=*/0,
180 VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
181 VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
182 VK_CHROMA_LOCATION_COSITED_EVEN,
183 VK_CHROMA_LOCATION_COSITED_EVEN,
184 VK_FILTER_LINEAR,
185 false,
186 formatProperties.linearTilingFeatures);
187 GrVkAlloc alloc(fImageMemory, 0 /* offset */, requirements.size, 0 /* flags */);
188 GrVkImageInfo imageInfo(fImage, alloc, VK_IMAGE_TILING_LINEAR, VK_IMAGE_LAYOUT_UNDEFINED,
189 vkImageInfo.format, 1 /* levelCount */, VK_QUEUE_FAMILY_IGNORED,
190 GrProtected::kNo, ycbcrInfo);
191
192 fTexture = GrBackendTexture(width, height, imageInfo);
Robert Phillips5f0cda42020-06-15 14:26:58 -0400193 return true;
Robert Phillipsae413d82020-06-10 11:04:51 -0400194}
195
196#endif // SK_VULKAN