/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "VkTestContext.h"

#ifdef SK_VULKAN

#include "GrContext.h"
#include "VkTestUtils.h"
#include "vk/GrVkInterface.h"
#include "vk/GrVkUtil.h"

namespace {
/**
 * Implements sk_gpu_test::FenceSync for Vulkan. It creates a single command
 * buffer with USAGE_SIMULTANEOUS and no content. On every insertFence request
 * it submits the command buffer with a new fence.
 */
class VkFenceSync : public sk_gpu_test::FenceSync {
public:
    VkFenceSync(sk_sp<const GrVkInterface> vk, VkDevice device, VkQueue queue,
                uint32_t queueFamilyIndex)
            : fVk(std::move(vk))
            , fDevice(device)
            , fQueue(queue) {
        SkDEBUGCODE(fUnfinishedSyncs = 0;)
        VkCommandPoolCreateInfo createInfo;
        createInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;
        createInfo.queueFamilyIndex = queueFamilyIndex;
        GR_VK_CALL_ERRCHECK(fVk, CreateCommandPool(fDevice, &createInfo, nullptr, &fCommandPool));

        VkCommandBufferAllocateInfo allocateInfo;
        allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        allocateInfo.pNext = nullptr;
        allocateInfo.commandBufferCount = 1;
        allocateInfo.commandPool = fCommandPool;
        allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        GR_VK_CALL_ERRCHECK(fVk, AllocateCommandBuffers(fDevice, &allocateInfo, &fCommandBuffer));

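        // Record the command buffer a single time with no commands in it; SIMULTANEOUS_USE lets
        // the same buffer be pending on the queue from more than one submission at once.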
        VkCommandBufferBeginInfo beginInfo;
        beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        beginInfo.pNext = nullptr;
        beginInfo.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
        beginInfo.pInheritanceInfo = nullptr;
        GR_VK_CALL_ERRCHECK(fVk, BeginCommandBuffer(fCommandBuffer, &beginInfo));
        GR_VK_CALL_ERRCHECK(fVk, EndCommandBuffer(fCommandBuffer));
    }

    ~VkFenceSync() override {
        SkASSERT(!fUnfinishedSyncs);
        // If the above assertion is true then the command buffer should not be in flight.
        GR_VK_CALL(fVk, FreeCommandBuffers(fDevice, fCommandPool, 1, &fCommandBuffer));
        GR_VK_CALL(fVk, DestroyCommandPool(fDevice, fCommandPool, nullptr));
    }

    sk_gpu_test::PlatformFence SK_WARN_UNUSED_RESULT insertFence() const override {
        VkFence fence;
        VkFenceCreateInfo info;
        info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        info.pNext = nullptr;
        info.flags = 0;
        GR_VK_CALL_ERRCHECK(fVk, CreateFence(fDevice, &info, nullptr, &fence));
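        // Resubmit the pre-recorded empty command buffer; the new fence signals once the queue
        // has worked through this submission (and everything submitted before it).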
        VkSubmitInfo submitInfo;
        submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.pNext = nullptr;
        submitInfo.waitSemaphoreCount = 0;
        submitInfo.pWaitSemaphores = nullptr;
        submitInfo.pWaitDstStageMask = nullptr;
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = &fCommandBuffer;
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores = nullptr;
        GR_VK_CALL_ERRCHECK(fVk, QueueSubmit(fQueue, 1, &submitInfo, fence));
        SkDEBUGCODE(++fUnfinishedSyncs;)
        return (sk_gpu_test::PlatformFence)fence;
    }

    bool waitFence(sk_gpu_test::PlatformFence opaqueFence) const override {
        VkFence fence = (VkFence)opaqueFence;
        static constexpr uint64_t kForever = ~((uint64_t)0);
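        // Block until the fence signals; with an effectively infinite timeout, only a timeout
        // result is reported as failure.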
        auto result = GR_VK_CALL(fVk, WaitForFences(fDevice, 1, &fence, true, kForever));
        return result != VK_TIMEOUT;
    }

    void deleteFence(sk_gpu_test::PlatformFence opaqueFence) const override {
        VkFence fence = (VkFence)opaqueFence;
        GR_VK_CALL(fVk, DestroyFence(fDevice, fence, nullptr));
        SkDEBUGCODE(--fUnfinishedSyncs;)
    }

private:
    sk_sp<const GrVkInterface> fVk;
    VkDevice fDevice;
    VkQueue fQueue;
    VkCommandPool fCommandPool;
    VkCommandBuffer fCommandBuffer;
    SkDEBUGCODE(mutable int fUnfinishedSyncs;)
    typedef sk_gpu_test::FenceSync INHERITED;
};
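
// A minimal usage sketch for VkFenceSync (hypothetical caller names, not part of this file):
//     sk_gpu_test::PlatformFence fence = fenceSync->insertFence();
//     // ... queue up more GPU work ...
//     fenceSync->waitFence(fence);    // blocks until the submission above has completed
//     fenceSync->deleteFence(fence);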

GR_STATIC_ASSERT(sizeof(VkFence) <= sizeof(sk_gpu_test::PlatformFence));

// TODO: Implement swap buffers and finish
class VkTestContextImpl : public sk_gpu_test::VkTestContext {
public:
    static VkTestContext* Create(VkTestContext* sharedContext) {
        GrVkBackendContext backendContext;
        bool ownsContext = true;
        if (sharedContext) {
            backendContext = sharedContext->getVkBackendContext();
            // We always delete the parent context last, so make sure the child does not think it
            // owns the Vulkan context.
            ownsContext = false;
        } else {
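            // No context to share: load the Vulkan library and build a new backend context
            // (instance, device, and queue) from scratch.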
            PFN_vkGetInstanceProcAddr instProc;
            PFN_vkGetDeviceProcAddr devProc;
            if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
                return nullptr;
            }
            if (!sk_gpu_test::CreateVkBackendContext(instProc, devProc, &backendContext)) {
                return nullptr;
            }
        }
        return new VkTestContextImpl(backendContext, ownsContext);
    }

    ~VkTestContextImpl() override { this->teardown(); }

    void testAbandon() override {}

    // There is really nothing to do here since we don't own any unqueued command buffers.
    void submit() override {}

    void finish() override {}

    sk_sp<GrContext> makeGrContext(const GrContextOptions& options) override {
        return GrContext::MakeVulkan(fVk, options);
    }

protected:
    void teardown() override {
        INHERITED::teardown();
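        // Release the memory allocator first; it holds device memory that must be freed before
        // the device it allocates from is destroyed below.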
        fVk.fMemoryAllocator.reset();
        if (fOwnsContext) {
            GR_VK_CALL(this->vk(), DeviceWaitIdle(fVk.fDevice));
            GR_VK_CALL(this->vk(), DestroyDevice(fVk.fDevice, nullptr));
            GR_VK_CALL(this->vk(), DestroyInstance(fVk.fInstance, nullptr));
        }
    }

private:
    VkTestContextImpl(const GrVkBackendContext& backendContext, bool ownsContext)
            : VkTestContext(backendContext, ownsContext) {
        fFenceSync.reset(new VkFenceSync(fVk.fInterface, fVk.fDevice, fVk.fQueue,
                                         fVk.fGraphicsQueueIndex));
    }

    void onPlatformMakeCurrent() const override {}
    std::function<void()> onPlatformGetAutoContextRestore() const override { return nullptr; }
    void onPlatformSwapBuffers() const override {}

    typedef sk_gpu_test::VkTestContext INHERITED;
};
} // anonymous namespace

namespace sk_gpu_test {
VkTestContext* CreatePlatformVkTestContext(VkTestContext* sharedContext) {
    return VkTestContextImpl::Create(sharedContext);
}
} // namespace sk_gpu_test

#endif