/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "VkTestContext.h"

#ifdef SK_VULKAN

#include "GrContext.h"
#include "vk/GrVkInterface.h"
#include "vk/GrVkUtil.h"
#include <vulkan/vulkan.h>

namespace {
/**
 * Implements sk_gpu_test::FenceSync for Vulkan. It creates a single command
 * buffer recorded with the SIMULTANEOUS_USE flag and no content. On every
 * insertFence() request it submits that command buffer with a new fence.
 */
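//
// A minimal usage sketch (illustrative only; `sync` and the previously queued GPU
// work are assumptions, not part of this file):
//
//     sk_gpu_test::PlatformFence fence = sync.insertFence();
//     // ... let the GPU work through the queue ...
//     if (sync.waitFence(fence)) {
//         sync.deleteFence(fence);
//     }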
class VkFenceSync : public sk_gpu_test::FenceSync {
public:
    VkFenceSync(sk_sp<const GrVkInterface> vk, VkDevice device, VkQueue queue,
                uint32_t queueFamilyIndex)
            : fVk(std::move(vk))
            , fDevice(device)
            , fQueue(queue) {
        SkDEBUGCODE(fUnfinishedSyncs = 0;)
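        // Create a command pool on the supplied queue family; it backs the single
        // command buffer that is reused for every fence submission.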
        VkCommandPoolCreateInfo createInfo;
        createInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;
        createInfo.queueFamilyIndex = queueFamilyIndex;
        GR_VK_CALL_ERRCHECK(fVk, CreateCommandPool(fDevice, &createInfo, nullptr, &fCommandPool));

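        // Allocate one primary command buffer from the pool. It is recorded once in
        // the constructor and then resubmitted by insertFence().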
        VkCommandBufferAllocateInfo allocateInfo;
        allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        allocateInfo.pNext = nullptr;
        allocateInfo.commandBufferCount = 1;
        allocateInfo.commandPool = fCommandPool;
        allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        GR_VK_CALL_ERRCHECK(fVk, AllocateCommandBuffers(fDevice, &allocateInfo, &fCommandBuffer));

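        // Record the command buffer once, empty, with SIMULTANEOUS_USE so that it may
        // be pending in several queue submissions at the same time.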
        VkCommandBufferBeginInfo beginInfo;
        beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        beginInfo.pNext = nullptr;
        beginInfo.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
        beginInfo.pInheritanceInfo = nullptr;
        GR_VK_CALL_ERRCHECK(fVk, BeginCommandBuffer(fCommandBuffer, &beginInfo));
        GR_VK_CALL_ERRCHECK(fVk, EndCommandBuffer(fCommandBuffer));
    }

    ~VkFenceSync() override {
        SkASSERT(!fUnfinishedSyncs);
        // If the above assertion is true then the command buffer should not be in flight.
        GR_VK_CALL(fVk, FreeCommandBuffers(fDevice, fCommandPool, 1, &fCommandBuffer));
        GR_VK_CALL(fVk, DestroyCommandPool(fDevice, fCommandPool, nullptr));
    }

    sk_gpu_test::PlatformFence SK_WARN_UNUSED_RESULT insertFence() const override {
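        // Each insertFence() call creates a brand new, unsignaled fence to attach to
        // the submission below.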
        VkFence fence;
        VkFenceCreateInfo info;
        info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        info.pNext = nullptr;
        info.flags = 0;
        GR_VK_CALL_ERRCHECK(fVk, CreateFence(fDevice, &info, nullptr, &fence));
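        // Resubmit the shared, pre-recorded (empty) command buffer with the new fence;
        // when the fence signals, the work submitted to fQueue before this call has
        // been worked through.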
        VkSubmitInfo submitInfo;
        submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.pNext = nullptr;
        submitInfo.waitSemaphoreCount = 0;
        submitInfo.pWaitSemaphores = nullptr;
        submitInfo.pWaitDstStageMask = nullptr;
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = &fCommandBuffer;
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores = nullptr;
        GR_VK_CALL_ERRCHECK(fVk, QueueSubmit(fQueue, 1, &submitInfo, fence));
        SkDEBUGCODE(++fUnfinishedSyncs;)
        return (sk_gpu_test::PlatformFence)fence;
    }

    bool waitFence(sk_gpu_test::PlatformFence opaqueFence) const override {
        VkFence fence = (VkFence)opaqueFence;
        static constexpr uint64_t kForever = ~((uint64_t)0);
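        // kForever is UINT64_MAX, so this blocks until the fence signals rather than
        // ever returning VK_TIMEOUT in practice.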
        auto result = GR_VK_CALL(fVk, WaitForFences(fDevice, 1, &fence, true, kForever));
        return result != VK_TIMEOUT;
    }

    void deleteFence(sk_gpu_test::PlatformFence opaqueFence) const override {
        VkFence fence = (VkFence)opaqueFence;
        GR_VK_CALL(fVk, DestroyFence(fDevice, fence, nullptr));
        SkDEBUGCODE(--fUnfinishedSyncs;)
    }

private:
    sk_sp<const GrVkInterface> fVk;
    VkDevice fDevice;
    VkQueue fQueue;
    VkCommandPool fCommandPool;
    VkCommandBuffer fCommandBuffer;
    SkDEBUGCODE(mutable int fUnfinishedSyncs;)
    typedef sk_gpu_test::FenceSync INHERITED;
};

GR_STATIC_ASSERT(sizeof(VkFence) <= sizeof(sk_gpu_test::PlatformFence));

// TODO: Implement swap buffers and finish
class VkTestContextImpl : public sk_gpu_test::VkTestContext {
public:
    static VkTestContext* Create(VkTestContext* sharedContext) {
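        // When a shared context is supplied, reuse its GrVkBackendContext (instance,
        // device, and queue); otherwise create a fresh one from the loader's
        // vkGetInstanceProcAddr/vkGetDeviceProcAddr entry points.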
        sk_sp<const GrVkBackendContext> backendContext;
        if (sharedContext) {
            backendContext = sharedContext->getVkBackendContext();
        } else {
            backendContext.reset(GrVkBackendContext::Create(vkGetInstanceProcAddr,
                                                            vkGetDeviceProcAddr));
        }
        if (!backendContext) {
            return nullptr;
        }
        return new VkTestContextImpl(std::move(backendContext));
    }

    ~VkTestContextImpl() override { this->teardown(); }

    void testAbandon() override {}

    // There is really nothing to do here since we don't own any unqueued command buffers.
    void submit() override {}

    void finish() override {}

    sk_sp<GrContext> makeGrContext(const GrContextOptions& options) override {
        return GrContext::MakeVulkan(fVk.get(), options);
    }

protected:
    void teardown() override {
        INHERITED::teardown();
        fVk.reset(nullptr);
    }

private:
    VkTestContextImpl(sk_sp<const GrVkBackendContext> backendContext)
            : VkTestContext(std::move(backendContext)) {
        fFenceSync.reset(new VkFenceSync(fVk->fInterface, fVk->fDevice, fVk->fQueue,
                                         fVk->fGraphicsQueueIndex));
    }

    void onPlatformMakeCurrent() const override {}
    void onPlatformSwapBuffers() const override {}

    typedef sk_gpu_test::VkTestContext INHERITED;
};
}  // anonymous namespace

namespace sk_gpu_test {
VkTestContext* CreatePlatformVkTestContext(VkTestContext* sharedContext) {
    return VkTestContextImpl::Create(sharedContext);
}
}  // namespace sk_gpu_test

#endif