/*
 * Copyright 2019 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "include/core/SkTypes.h"
9
10#if SK_SUPPORT_GPU && defined(SK_VULKAN)
11
#include "include/core/SkImage.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrContext.h"
#include "include/gpu/vk/GrVkBackendContext.h"
#include "include/gpu/vk/GrVkExtensions.h"
#include "tests/Test.h"
#include "tools/gpu/vk/VkTestUtils.h"

#include <cmath>
19
// Dimensions (in pixels) of the test image used throughout this file.
const size_t kImageWidth = 8;
const size_t kImageHeight = 8;

// Per-pixel test pattern in narrow-range YUV: Y ramps diagonally over
// [16, 235]; U ramps horizontally and V vertically over [16, 240].
// Both parameters are kept on getU/getV so all three share a signature.
static int getY(size_t x, size_t y) {
    const size_t diagonalSpan = kImageWidth + kImageHeight - 2;
    return static_cast<int>(16 + (x + y) * 219 / diagonalSpan);
}
static int getU(size_t x, size_t y) {
    const size_t horizontalSpan = kImageWidth - 1;
    return static_cast<int>(16 + x * 224 / horizontalSpan);
}
static int getV(size_t x, size_t y) {
    const size_t verticalSpan = kImageHeight - 1;
    return static_cast<int>(16 + y * 224 / verticalSpan);
}
28
// Declares a member that caches the Vulkan entry point vk<name> as a typed
// function pointer, e.g. DECLARE_VK_PROC(CreateImage) declares
// "PFN_vkCreateImage fVkCreateImage".
#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

// Resolves an instance-scope Vulkan entry point into fVk<name>. Relies on
// `getProc`, `fBackendContext`, and `reporter` being in scope at the point
// of expansion (see init()); on failure it reports an error and makes the
// enclosing function return false.
#define ACQUIRE_INST_VK_PROC(name) \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance,\
                                                       VK_NULL_HANDLE)); \
    if (fVk##name == nullptr) { \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
        return false; \
    }

// Resolves a device-scope Vulkan entry point into fVk<name> via `fDevice`;
// same expansion-scope requirements and error handling as
// ACQUIRE_INST_VK_PROC.
#define ACQUIRE_DEVICE_VK_PROC(name) \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
    if (fVk##name == nullptr) { \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
        return false; \
    }
45
// RAII helper that stands up a minimal Vulkan device plus GrContext and can
// create a linear-tiled VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image wrapped as
// an SkImage, for exercising YCbCr sampler conversion in the tests below.
// The destructor releases everything init()/createI420Image() created.
class VkYcbcrSamplerTestHelper {
public:
    VkYcbcrSamplerTestHelper() {}
    ~VkYcbcrSamplerTestHelper();

    // Loads Vulkan, creates the backend context/device, resolves entry
    // points, and creates fGrContext. Returns false if anything is
    // unavailable (e.g. the samplerYcbcrConversion feature).
    bool init(skiatest::Reporter* reporter);

    // Creates and fills an I420 test image and wraps it in an SkImage.
    // Returns nullptr on failure or if the format is unsupported.
    sk_sp<SkImage> createI420Image(skiatest::Reporter* reporter);

    GrContext* getGrContext() { return fGrContext.get(); }

private:
    GrVkExtensions fExtensions;
    // Head of the feature chain filled by CreateVkBackendContext; the
    // chained structs are freed in the destructor.
    VkPhysicalDeviceFeatures2 fFeatures = {};
    VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;

    // Function pointers resolved in init() via the ACQUIRE_*_VK_PROC macros.
    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);

    DECLARE_VK_PROC(GetPhysicalDeviceFormatProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties);

    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(DestroyImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements);
    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(FreeMemory);
    DECLARE_VK_PROC(BindImageMemory);
    DECLARE_VK_PROC(MapMemory);
    DECLARE_VK_PROC(UnmapMemory);
    DECLARE_VK_PROC(FlushMappedMemoryRanges);
    DECLARE_VK_PROC(GetImageSubresourceLayout);

    VkDevice fDevice = VK_NULL_HANDLE;

    PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;

    GrVkBackendContext fBackendContext;
    sk_sp<GrContext> fGrContext;

    // The raw YCbCr image and its backing memory, created by
    // createI420Image() and destroyed in the destructor.
    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fImageMemory = VK_NULL_HANDLE;
    // NOTE(review): missing the f-prefix used by every other member;
    // consider renaming to fTexture for consistency.
    GrBackendTexture texture;
};
91
// Tears down in reverse order of creation: the GrContext first (it may still
// use the device), then the test image and its memory, then the memory
// allocator and device, the debug callback, and finally the instance.
VkYcbcrSamplerTestHelper::~VkYcbcrSamplerTestHelper() {
    fGrContext.reset();

    if (fImage != VK_NULL_HANDLE) {
        fVkDestroyImage(fDevice, fImage, nullptr);
        fImage = VK_NULL_HANDLE;
    }
    if (fImageMemory != VK_NULL_HANDLE) {
        fVkFreeMemory(fDevice, fImageMemory, nullptr);
        fImageMemory = VK_NULL_HANDLE;
    }

    // Release the memory allocator before the device it allocates from.
    fBackendContext.fMemoryAllocator.reset();
    if (fDevice != VK_NULL_HANDLE) {
        // Drain any outstanding GPU work before destroying the device.
        fVkDeviceWaitIdle(fDevice);
        fVkDestroyDevice(fDevice, nullptr);
        fDevice = VK_NULL_HANDLE;
    }
    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
    }
    if (fBackendContext.fInstance != VK_NULL_HANDLE) {
        fVkDestroyInstance(fBackendContext.fInstance, nullptr);
        fBackendContext.fInstance = VK_NULL_HANDLE;
    }

    // Frees the feature structs chained onto fFeatures.pNext by
    // CreateVkBackendContext during init().
    sk_gpu_test::FreeVulkanFeaturesStructs(&fFeatures);
}
120
// Loads the Vulkan library, creates a backend context + device, resolves the
// entry points this helper uses, verifies samplerYcbcrConversion support,
// and creates the GrContext. Returns false on any failure (or when YCbCr
// sampling is unsupported, so the caller can skip the test); the destructor
// cleans up whatever was created before the failure.
bool VkYcbcrSamplerTestHelper::init(skiatest::Reporter* reporter) {
    PFN_vkGetInstanceProcAddr instProc;
    PFN_vkGetDeviceProcAddr devProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
        ERRORF(reporter, "Failed to load Vulkan");
        return false;
    }
    // Resolver used both by CreateVkBackendContext and by the ACQUIRE_*
    // macros below: device-level lookups go through devProc, everything
    // else through instProc.
    auto getProc = [&instProc, &devProc](const char* proc_name,
                                         VkInstance instance, VkDevice device) {
        if (device != VK_NULL_HANDLE) {
            return devProc(device, proc_name);
        }
        return instProc(instance, proc_name);
    };

    fFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    fFeatures.pNext = nullptr;

    fBackendContext.fInstance = VK_NULL_HANDLE;
    fBackendContext.fDevice = VK_NULL_HANDLE;

    // Fills fBackendContext and chains the enabled feature structs onto
    // fFeatures.pNext (freed in the destructor).
    if (!sk_gpu_test::CreateVkBackendContext(getProc, &fBackendContext, &fExtensions, &fFeatures,
                                             &fDebugCallback, nullptr, sk_gpu_test::CanPresentFn(),
                                             false)) {
        return false;
    }
    fDevice = fBackendContext.fDevice;

    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
                instProc(fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT"));
    }
    // Each macro reports and returns false if the entry point is missing.
    ACQUIRE_INST_VK_PROC(DestroyInstance)
    ACQUIRE_INST_VK_PROC(DeviceWaitIdle)
    ACQUIRE_INST_VK_PROC(DestroyDevice)

    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceFormatProperties)
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties)

    ACQUIRE_DEVICE_VK_PROC(CreateImage)
    ACQUIRE_DEVICE_VK_PROC(DestroyImage)
    ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements)
    ACQUIRE_DEVICE_VK_PROC(AllocateMemory)
    ACQUIRE_DEVICE_VK_PROC(FreeMemory)
    ACQUIRE_DEVICE_VK_PROC(BindImageMemory)
    ACQUIRE_DEVICE_VK_PROC(MapMemory)
    ACQUIRE_DEVICE_VK_PROC(UnmapMemory)
    ACQUIRE_DEVICE_VK_PROC(FlushMappedMemoryRanges)
    ACQUIRE_DEVICE_VK_PROC(GetImageSubresourceLayout)

    // Walk the feature chain filled in by CreateVkBackendContext looking for
    // samplerYcbcrConversion support; without it the tests cannot run.
    bool ycbcrSupported = false;
    VkBaseOutStructure* feature = reinterpret_cast<VkBaseOutStructure*>(fFeatures.pNext);
    while (feature) {
        if (feature->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES) {
            VkPhysicalDeviceSamplerYcbcrConversionFeatures* ycbcrFeatures =
                    reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>(feature);
            ycbcrSupported = ycbcrFeatures->samplerYcbcrConversion;
            break;
        }
        feature = feature->pNext;
    }
    if (!ycbcrSupported) {
        return false;
    }

    fGrContext = GrContext::MakeVulkan(fBackendContext);
    if (!fGrContext) {
        return false;
    }

    return true;
}
193
// Creates an 8x8 two-plane VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image, fills
// plane 0 (Y) and plane 1 (interleaved U/V at half resolution) with the
// getY/getU/getV test pattern through a host-visible linear-tiled mapping,
// then wraps the VkImage in an SkImage carrying a BT.709 narrow-range YCbCr
// conversion. Returns nullptr if the format is unsupported or any Vulkan
// call fails; fImage/fImageMemory are cleaned up by the destructor.
sk_sp<SkImage> VkYcbcrSamplerTestHelper::createI420Image(skiatest::Reporter* reporter) {
    // Verify that the image format is supported.
    VkFormatProperties formatProperties;
    fVkGetPhysicalDeviceFormatProperties(fBackendContext.fPhysicalDevice,
                                         VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, &formatProperties);
    if (!(formatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
        // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
        return nullptr;
    }

    // Create YCbCr image. Linear tiling so the planes can be written through
    // a CPU mapping below.
    VkImageCreateInfo vkImageInfo = {};
    vkImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    vkImageInfo.imageType = VK_IMAGE_TYPE_2D;
    vkImageInfo.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    vkImageInfo.extent = VkExtent3D{kImageWidth, kImageHeight, 1};
    vkImageInfo.mipLevels = 1;
    vkImageInfo.arrayLayers = 1;
    vkImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    vkImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
    vkImageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
    vkImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    vkImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    REPORTER_ASSERT(reporter, fImage == VK_NULL_HANDLE);
    if (fVkCreateImage(fDevice, &vkImageInfo, nullptr, &fImage) != VK_SUCCESS) {
        ERRORF(reporter, "Failed to allocate I420 image");
        return nullptr;
    }

    VkMemoryRequirements requirements;
    fVkGetImageMemoryRequirements(fDevice, fImage, &requirements);

    // Find a host-visible memory type compatible with the image's
    // requirements so the pattern can be written directly from the CPU.
    uint32_t memoryTypeIndex = 0;
    bool foundHeap = false;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    fVkGetPhysicalDeviceMemoryProperties(fBackendContext.fPhysicalDevice, &phyDevMemProps);
    for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount && !foundHeap; ++i) {
        if (requirements.memoryTypeBits & (1 << i)) {
            // Map host-visible memory.
            if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
                memoryTypeIndex = i;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        ERRORF(reporter, "Failed to find valid heap for imported memory");
        return nullptr;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    REPORTER_ASSERT(reporter, fImageMemory == VK_NULL_HANDLE);
    if (fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fImageMemory) != VK_SUCCESS) {
        ERRORF(reporter, "Failed to allocate VkDeviceMemory.");
        return nullptr;
    }

    void* mappedBuffer;
    if (fVkMapMemory(fDevice, fImageMemory, 0u, requirements.size, 0u, &mappedBuffer) !=
        VK_SUCCESS) {
        ERRORF(reporter, "Failed to map Vulkan memory.");
        return nullptr;
    }

    // Write Y channel (plane 0), honoring the driver-reported row pitch.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    fVkGetImageSubresourceLayout(fDevice, fImage, &subresource, &yLayout);
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < kImageHeight; ++y) {
        for (size_t x = 0; x < kImageWidth; ++x) {
            bufferData[y * yLayout.rowPitch + x] = getY(x, y);
        }
    }

    // Write UV channels: plane 1 is half-resolution with U and V interleaved
    // per pixel; sample the pattern at the even full-resolution coordinates.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    fVkGetImageSubresourceLayout(fDevice, fImage, &subresource, &uvLayout);
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < kImageHeight / 2; ++y) {
        for (size_t x = 0; x < kImageWidth / 2; ++x) {
            bufferData[y * uvLayout.rowPitch + x * 2] = getU(x * 2, y * 2);
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = getV(x * 2, y * 2);
        }
    }

    // Flush the whole allocation in case the memory type is not
    // host-coherent, then unmap.
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    if (fVkFlushMappedMemoryRanges(fDevice, 1, &flushRange) != VK_SUCCESS) {
        ERRORF(reporter, "Failed to flush buffer memory.");
        return nullptr;
    }
    fVkUnmapMemory(fDevice, fImageMemory);

    // Bind image memory.
    if (fVkBindImageMemory(fDevice, fImage, fImageMemory, 0u) != VK_SUCCESS) {
        ERRORF(reporter, "Failed to bind VkImage memory.");
        return nullptr;
    }

    // Wrap the image into SkImage with a BT.709 narrow-range YCbCr
    // conversion matching the data written above.
    GrVkYcbcrConversionInfo ycbcrInfo(vkImageInfo.format,
                                      /*externalFormat=*/0,
                                      VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                      VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                      VK_CHROMA_LOCATION_COSITED_EVEN,
                                      VK_CHROMA_LOCATION_COSITED_EVEN,
                                      VK_FILTER_LINEAR,
                                      false,
                                      formatProperties.linearTilingFeatures);
    GrVkAlloc alloc(fImageMemory, 0 /* offset */, requirements.size, 0 /* flags */);
    GrVkImageInfo imageInfo(fImage, alloc, VK_IMAGE_TILING_LINEAR, VK_IMAGE_LAYOUT_UNDEFINED,
                            vkImageInfo.format, 1 /* levelCount */, VK_QUEUE_FAMILY_IGNORED,
                            GrProtected::kNo, ycbcrInfo);

    // Kept in a member alongside the raw VkImage owned by this helper.
    texture = GrBackendTexture(kImageWidth, kImageHeight, imageInfo);
    sk_sp<SkImage> image = SkImage::MakeFromTexture(fGrContext.get(),
                                                    texture,
                                                    kTopLeft_GrSurfaceOrigin,
                                                    kRGB_888x_SkColorType,
                                                    kPremul_SkAlphaType,
                                                    nullptr);

    if (!image) {
        ERRORF(reporter, "Failed to wrap VkImage with SkImage");
        return nullptr;
    }

    return image;
}
338
// Rounds x to the nearest integer (halves away from zero) and clamps the
// result to the valid 8-bit color range [0, 255].
// Uses std::lround from <cmath> instead of relying on an unqualified
// ::round that was only available via transitive includes.
static int round_and_clamp(float x) {
    int r = static_cast<int>(std::lround(x));
    if (r > 255) return 255;
    if (r < 0) return 0;
    return r;
}
345
346DEF_GPUTEST(VkYCbcrSampler_DrawImageWithYcbcrSampler, reporter, options) {
347 VkYcbcrSamplerTestHelper helper;
348 if (!helper.init(reporter)) {
349 return;
350 }
351
352 sk_sp<SkImage> srcImage = helper.createI420Image(reporter);
353 if (!srcImage) {
354 return;
355 }
356
357 sk_sp<SkSurface> surface = SkSurface::MakeRenderTarget(
358 helper.getGrContext(), SkBudgeted::kNo,
359 SkImageInfo::Make(kImageWidth, kImageHeight, kN32_SkColorType, kPremul_SkAlphaType));
360 if (!surface) {
361 ERRORF(reporter, "Failed to create target SkSurface");
362 return;
363 }
364 surface->getCanvas()->drawImage(srcImage, 0, 0);
365 surface->flush();
366
367 std::vector<uint8_t> readbackData(kImageWidth * kImageHeight * 4);
368 if (!surface->readPixels(SkImageInfo::Make(kImageWidth, kImageHeight, kRGBA_8888_SkColorType,
369 kOpaque_SkAlphaType),
370 readbackData.data(), kImageWidth * 4, 0, 0)) {
371 ERRORF(reporter, "Readback failed");
372 return;
373 }
374
375 // Allow resulting color to be off by 1 in each channel as some Vulkan implementations do not
376 // round YCbCr sampler result properly.
377 const int kColorTolerance = 1;
378
379 // Verify results only for pixels with even coordinates, since others use
380 // interpolated U & V channels.
381 for (size_t y = 0; y < kImageHeight; y += 2) {
382 for (size_t x = 0; x < kImageWidth; x += 2) {
383 // createI420Image() initializes the image with VK_SAMPLER_YCBCR_RANGE_ITU_NARROW.
384 float yChannel = (static_cast<float>(getY(x, y)) - 16.0) / 219.0;
385 float uChannel = (static_cast<float>(getU(x, y)) - 128.0) / 224.0;
386 float vChannel = (static_cast<float>(getV(x, y)) - 128.0) / 224.0;
387
388 // BR.709 conversion as specified in
389 // https://www.khronos.org/registry/DataFormat/specs/1.2/dataformat.1.2.html#MODEL_YUV
390 int expectedR = round_and_clamp((yChannel + 1.5748f * vChannel) * 255.0);
391 int expectedG = round_and_clamp((yChannel - 0.13397432f / 0.7152f * uChannel -
392 0.33480248f / 0.7152f * vChannel) *
393 255.0);
394 int expectedB = round_and_clamp((yChannel + 1.8556f * uChannel) * 255.0);
395
396 int r = readbackData[(y * kImageWidth + x) * 4];
397 if (abs(r - expectedR) > kColorTolerance) {
398 ERRORF(reporter, "R should be %d, but is %d at (%d, %d)", expectedR, r, x, y);
399 }
400
401 int g = readbackData[(y * kImageWidth + x) * 4 + 1];
402 if (abs(g - expectedG) > kColorTolerance) {
403 ERRORF(reporter, "G should be %d, but is %d at (%d, %d)", expectedG, g, x, y);
404 }
405
406 int b = readbackData[(y * kImageWidth + x) * 4 + 2];
407 if (abs(b - expectedB) > kColorTolerance) {
408 ERRORF(reporter, "B should be %d, but is %d at (%d, %d)", expectedB, b, x, y);
409 }
410 }
411 }
412}
413
414// Verifies that it's not possible to allocate Ycbcr texture directly.
415DEF_GPUTEST(VkYCbcrSampler_NoYcbcrSurface, reporter, options) {
416 VkYcbcrSamplerTestHelper helper;
417 if (!helper.init(reporter)) {
418 return;
419 }
420
421 GrBackendTexture texture = helper.getGrContext()->createBackendTexture(
422 kImageWidth, kImageHeight, GrBackendFormat::MakeVk(VK_FORMAT_G8_B8R8_2PLANE_420_UNORM),
423 GrMipMapped::kNo, GrRenderable::kNo, GrProtected::kNo);
424 if (texture.isValid()) {
425 ERRORF(reporter,
426 "GrContext::createBackendTexture() didn't fail as expected for Ycbcr format.");
427 }
428}
429
430#endif // SK_SUPPORT_GPU && defined(SK_VULKAN)