blob: 82e688c5c1bd41664d8c77ed9087bc6df531c15e [file] [log] [blame]
/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "tools/gpu/vk/VkYcbcrSamplerHelper.h"
9
10#ifdef SK_VULKAN
11
Robert Phillips057c33f2020-07-17 11:59:01 -040012#include "include/gpu/GrDirectContext.h"
Adlai Hollera0693042020-10-14 11:23:11 -040013#include "src/gpu/GrDirectContextPriv.h"
Robert Phillips85aa4282020-06-11 10:54:43 -040014#include "src/gpu/vk/GrVkGpu.h"
15#include "src/gpu/vk/GrVkUtil.h"
Robert Phillipsae413d82020-06-10 11:04:51 -040016
17int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
18 return 16 + (x + y) * 219 / (width + height - 2);
19}
20
21std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
22 return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
23}
24
Robert Phillips85aa4282020-06-11 10:54:43 -040025GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
Robert Phillips057c33f2020-07-17 11:59:01 -040026 return (GrVkGpu*) fDContext->priv().getGpu();
Robert Phillips85aa4282020-06-11 10:54:43 -040027}
Robert Phillipsae413d82020-06-10 11:04:51 -040028
// Wraps a borrowed GrDirectContext; the context must outlive this helper.
VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
    // YCbCr sampler conversion is Vulkan-only; fail fast (even in release
    // builds) if the context uses any other backend.
    SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
}
Robert Phillipsae413d82020-06-10 11:04:51 -040032
33VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
Robert Phillips85aa4282020-06-11 10:54:43 -040034 GrVkGpu* vkGpu = this->vkGpu();
Robert Phillipsae413d82020-06-10 11:04:51 -040035
36 if (fImage != VK_NULL_HANDLE) {
Robert Phillips85aa4282020-06-11 10:54:43 -040037 GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
Robert Phillipsae413d82020-06-10 11:04:51 -040038 fImage = VK_NULL_HANDLE;
39 }
40 if (fImageMemory != VK_NULL_HANDLE) {
Robert Phillips85aa4282020-06-11 10:54:43 -040041 GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
Robert Phillipsae413d82020-06-10 11:04:51 -040042 fImageMemory = VK_NULL_HANDLE;
43 }
Robert Phillipsae413d82020-06-10 11:04:51 -040044}
45
Robert Phillips85aa4282020-06-11 10:54:43 -040046bool VkYcbcrSamplerHelper::isYCbCrSupported() {
47 GrVkGpu* vkGpu = this->vkGpu();
Robert Phillipsae413d82020-06-10 11:04:51 -040048
Robert Phillips85aa4282020-06-11 10:54:43 -040049 return vkGpu->vkCaps().supportsYcbcrConversion();
Robert Phillipsae413d82020-06-10 11:04:51 -040050}
51
// Creates a width x height VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image, fills
// its two planes with the gradient pattern from GetExpectedY()/GetExpectedUV(),
// and wraps it (plus a 709/narrow-range YCbCr conversion description) into
// fTexture. Returns false at the first failing Vulkan call; any handles
// created before the failure are left in fImage/fImageMemory and are cleaned
// up by the destructor.
bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
    GrVkGpu* vkGpu = this->vkGpu();
    VkResult result;

    // Verify that the image format is supported.
    VkFormatProperties formatProperties;
    GR_VK_CALL(vkGpu->vkInterface(),
               GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                 &formatProperties));
    // Linear tiling is checked (not optimal) because the image is created
    // with VK_IMAGE_TILING_LINEAR below so the CPU can write pixels directly.
    if (!(formatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
        // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
        return false;
    }

    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo = {};
    vkImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    vkImageInfo.imageType = VK_IMAGE_TYPE_2D;
    vkImageInfo.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    vkImageInfo.extent = VkExtent3D{width, height, 1};
    vkImageInfo.mipLevels = 1;
    vkImageInfo.arrayLayers = 1;
    vkImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    vkImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
    vkImageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                        VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    vkImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    vkImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    SkASSERT(fImage == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    VkMemoryRequirements requirements;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
                                                                fImage,
                                                                &requirements));

    // Find a memory type that satisfies the image's requirements and is
    // host-visible, so the pixel data can be written via MapMemory below.
    uint32_t memoryTypeIndex = 0;
    bool foundHeap = false;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
                                                                       &phyDevMemProps));
    for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount && !foundHeap; ++i) {
        if (requirements.memoryTypeBits & (1 << i)) {
            // Map host-visible memory.
            if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
                memoryTypeIndex = i;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
                                                    nullptr, &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Map the whole allocation and write both planes before binding it to
    // the image (mapping does not require the memory to be bound yet).
    void* mappedBuffer;
    GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
                                               requirements.size, 0u, &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels. Plane 1 is half resolution (4:2:0) with interleaved
    // U and V bytes, so each (x, y) here covers a 2x2 block of the Y plane.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    // Flush the writes in case the chosen memory type is not host-coherent
    // (flushing coherent memory is harmless).
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));

    // Bind image memory.
    GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage.
    // BT.709 model with narrow (ITU) range and cosited-even chroma matches
    // the values produced by GetExpectedY()/GetExpectedUV() above.
    GrVkYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                         /*externalFormat=*/0,
                                         VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                         VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_FILTER_LINEAR,
                                         false,
                                         formatProperties.linearTilingFeatures};
    GrVkAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    GrVkImageInfo imageInfo = {fImage,
                               alloc,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               vkImageInfo.format,
                               vkImageInfo.usage,
                               1 /* sample count */,
                               1 /* levelCount */,
                               VK_QUEUE_FAMILY_IGNORED,
                               GrProtected::kNo,
                               ycbcrInfo};

    fTexture = GrBackendTexture(width, height, imageInfo);
    return true;
}
208
209#endif // SK_VULKAN