//
// Copyright 2017 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// CommandGraph:
//    Deferred work constructed by GL calls, that will later be flushed to Vulkan.
//

#include "libANGLE/renderer/vulkan/CommandGraph.h"

#include "libANGLE/renderer/vulkan/RenderTargetVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "libANGLE/renderer/vulkan/vk_helpers.h"

namespace rx
{

namespace vk
{

namespace
{

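// Allocates a secondary command buffer from 'commandPool' and starts recording it with the
// one-time-submit flag, so it can later be executed from the primary command buffer.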
Error InitAndBeginCommandBuffer(VkDevice device,
                                const CommandPool &commandPool,
                                const VkCommandBufferInheritanceInfo &inheritanceInfo,
                                VkCommandBufferUsageFlags flags,
                                CommandBuffer *commandBuffer)
{
    ASSERT(!commandBuffer->valid());

    VkCommandBufferAllocateInfo createInfo;
    createInfo.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    createInfo.pNext              = nullptr;
    createInfo.commandPool        = commandPool.getHandle();
    createInfo.level              = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
    createInfo.commandBufferCount = 1;

    ANGLE_TRY(commandBuffer->init(device, createInfo));

    VkCommandBufferBeginInfo beginInfo;
    beginInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.pNext            = nullptr;
    beginInfo.flags            = flags | VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    beginInfo.pInheritanceInfo = &inheritanceInfo;

    ANGLE_TRY(commandBuffer->begin(beginInfo));
    return NoError();
}

}  // anonymous namespace

// CommandGraphResource implementation.
CommandGraphResource::CommandGraphResource() : mCurrentWritingNode(nullptr)
{
}

CommandGraphResource::~CommandGraphResource()
{
}

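// Advancing to a newer queue serial means the stored dependency nodes belong to an earlier
// submission, so the cached reading/writing node pointers are dropped.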
void CommandGraphResource::updateQueueSerial(Serial queueSerial)
{
    ASSERT(queueSerial >= mStoredQueueSerial);

    if (queueSerial > mStoredQueueSerial)
    {
        mCurrentWritingNode = nullptr;
        mCurrentReadingNodes.clear();
        mStoredQueueSerial = queueSerial;
    }
}

Serial CommandGraphResource::getQueueSerial() const
{
    return mStoredQueueSerial;
}

bool CommandGraphResource::hasChildlessWritingNode() const
{
    return (mCurrentWritingNode != nullptr && !mCurrentWritingNode->hasChildren());
}

CommandGraphNode *CommandGraphResource::getCurrentWritingNode()
{
    return mCurrentWritingNode;
}

CommandGraphNode *CommandGraphResource::getNewWritingNode(RendererVk *renderer)
{
    CommandGraphNode *newCommands = renderer->allocateCommandNode();
    onWriteResource(newCommands, renderer->getCurrentQueueSerial());
    return newCommands;
}

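// Convenience entry point: allocates a fresh writing node for this resource and opens its
// outside-render-pass command buffer for recording.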
Error CommandGraphResource::beginWriteResource(RendererVk *renderer,
                                               CommandBuffer **commandBufferOut)
{
    CommandGraphNode *commands = getNewWritingNode(renderer);

    VkDevice device = renderer->getDevice();
    ANGLE_TRY(commands->beginOutsideRenderPassRecording(device, renderer->getCommandPool(),
                                                        commandBufferOut));
    return NoError();
}

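// Records a write to this resource by 'writingNode'. Outstanding readers and the previous
// writer become parents of 'writingNode', expressing write-after-read and write-after-write
// ordering in the graph.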
void CommandGraphResource::onWriteResource(CommandGraphNode *writingNode, Serial serial)
{
    updateQueueSerial(serial);

    // Make sure any open reads and writes finish before we execute 'writingNode'.
    if (!mCurrentReadingNodes.empty())
    {
        CommandGraphNode::SetHappensBeforeDependencies(mCurrentReadingNodes, writingNode);
        mCurrentReadingNodes.clear();
    }

    if (mCurrentWritingNode && mCurrentWritingNode != writingNode)
    {
        CommandGraphNode::SetHappensBeforeDependency(mCurrentWritingNode, writingNode);
    }

    mCurrentWritingNode = writingNode;
}

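// Records a read of this resource by 'readingNode'. If there is a childless (still open)
// writing node, it becomes a parent of 'readingNode' (read-after-write ordering).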
void CommandGraphResource::onReadResource(CommandGraphNode *readingNode, Serial serial)
{
    updateQueueSerial(serial);

    if (hasChildlessWritingNode())
    {
        ASSERT(mStoredQueueSerial == serial);

        // Ensure 'readingNode' happens after the current writing node.
        CommandGraphNode::SetHappensBeforeDependency(mCurrentWritingNode, readingNode);
    }

    // Add the read node to the list of nodes currently reading this resource.
    mCurrentReadingNodes.push_back(readingNode);
}

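// Returns true if pending work still references this resource. When it does not, the stale
// reading/writing node pointers are cleared so they are not used as dependencies again.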
bool CommandGraphResource::checkResourceInUseAndRefreshDeps(RendererVk *renderer)
{
    if (!renderer->isResourceInUse(*this) ||
        (renderer->getCurrentQueueSerial() > mStoredQueueSerial))
    {
        mCurrentReadingNodes.clear();
        mCurrentWritingNode = nullptr;
        return false;
    }
    else
    {
        return true;
    }
}

// CommandGraphNode implementation.

CommandGraphNode::CommandGraphNode() : mHasChildren(false), mVisitedState(VisitedState::Unvisited)
{
}

CommandGraphNode::~CommandGraphNode()
{
    mRenderPassFramebuffer.setHandle(VK_NULL_HANDLE);

    // Command buffers are managed by the command pool, so don't need to be freed.
    mOutsideRenderPassCommands.releaseHandle();
    mInsideRenderPassCommands.releaseHandle();
}

CommandBuffer *CommandGraphNode::getOutsideRenderPassCommands()
{
    ASSERT(!mHasChildren);
    return &mOutsideRenderPassCommands;
}

CommandBuffer *CommandGraphNode::getInsideRenderPassCommands()
{
    ASSERT(!mHasChildren);
    return &mInsideRenderPassCommands;
}

Error CommandGraphNode::beginOutsideRenderPassRecording(VkDevice device,
                                                        const CommandPool &commandPool,
                                                        CommandBuffer **commandsOut)
{
    ASSERT(!mHasChildren);

    VkCommandBufferInheritanceInfo inheritanceInfo;
    inheritanceInfo.sType                = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    inheritanceInfo.pNext                = nullptr;
    inheritanceInfo.renderPass           = VK_NULL_HANDLE;
    inheritanceInfo.subpass              = 0;
    inheritanceInfo.framebuffer          = VK_NULL_HANDLE;
    inheritanceInfo.occlusionQueryEnable = VK_FALSE;
    inheritanceInfo.queryFlags           = 0;
    inheritanceInfo.pipelineStatistics   = 0;

    ANGLE_TRY(InitAndBeginCommandBuffer(device, commandPool, inheritanceInfo, 0,
                                        &mOutsideRenderPassCommands));

    *commandsOut = &mOutsideRenderPassCommands;
    return NoError();
}

Error CommandGraphNode::beginInsideRenderPassRecording(RendererVk *renderer,
                                                       CommandBuffer **commandsOut)
{
    ASSERT(!mHasChildren);

    // Get a compatible RenderPass from the cache so we can initialize the inheritance info.
    // TODO(jmadill): Support query for compatible/conformant render pass. http://anglebug.com/2361
    RenderPass *compatibleRenderPass;
    ANGLE_TRY(renderer->getCompatibleRenderPass(mRenderPassDesc, &compatibleRenderPass));

    VkCommandBufferInheritanceInfo inheritanceInfo;
    inheritanceInfo.sType                = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    inheritanceInfo.pNext                = nullptr;
    inheritanceInfo.renderPass           = compatibleRenderPass->getHandle();
    inheritanceInfo.subpass              = 0;
    inheritanceInfo.framebuffer          = mRenderPassFramebuffer.getHandle();
    inheritanceInfo.occlusionQueryEnable = VK_FALSE;
    inheritanceInfo.queryFlags           = 0;
    inheritanceInfo.pipelineStatistics   = 0;

    ANGLE_TRY(InitAndBeginCommandBuffer(
        renderer->getDevice(), renderer->getCommandPool(), inheritanceInfo,
        VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, &mInsideRenderPassCommands));

    *commandsOut = &mInsideRenderPassCommands;
    return NoError();
}

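// Captures everything needed to begin the render pass at submit time: the framebuffer handle,
// render area, render pass description, and clear values.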
void CommandGraphNode::storeRenderPassInfo(const Framebuffer &framebuffer,
                                           const gl::Rectangle renderArea,
                                           const vk::RenderPassDesc &renderPassDesc,
                                           const std::vector<VkClearValue> &clearValues)
{
    mRenderPassDesc = renderPassDesc;
    mRenderPassFramebuffer.setHandle(framebuffer.getHandle());
    mRenderPassRenderArea = renderArea;
    std::copy(clearValues.begin(), clearValues.end(), mRenderPassClearValues.begin());
}

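// Adds a happens-before edge: 'beforeNode' becomes a parent of 'afterNode', so it is recorded
// into the primary command buffer first during the submit traversal.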
// static
void CommandGraphNode::SetHappensBeforeDependency(CommandGraphNode *beforeNode,
                                                  CommandGraphNode *afterNode)
{
    ASSERT(beforeNode != afterNode && !beforeNode->isChildOf(afterNode));
    afterNode->mParents.emplace_back(beforeNode);
    beforeNode->setHasChildren();
}

// static
void CommandGraphNode::SetHappensBeforeDependencies(
    const std::vector<CommandGraphNode *> &beforeNodes,
    CommandGraphNode *afterNode)
{
    afterNode->mParents.insert(afterNode->mParents.end(), beforeNodes.begin(), beforeNodes.end());

    // TODO(jmadill): is there a faster way to do this?
    for (CommandGraphNode *beforeNode : beforeNodes)
    {
        beforeNode->setHasChildren();

        ASSERT(beforeNode != afterNode && !beforeNode->isChildOf(afterNode));
    }
}

bool CommandGraphNode::hasParents() const
{
    return !mParents.empty();
}

void CommandGraphNode::setHasChildren()
{
    mHasChildren = true;
}

bool CommandGraphNode::hasChildren() const
{
    return mHasChildren;
}

// Do not call this in anything but testing code, since it's slow.
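// Iterative depth-first search over the parent links, with a visited set to handle diamonds.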
bool CommandGraphNode::isChildOf(CommandGraphNode *parent)
{
    std::set<CommandGraphNode *> visitedList;
    std::vector<CommandGraphNode *> openList;
    openList.insert(openList.begin(), mParents.begin(), mParents.end());
    while (!openList.empty())
    {
        CommandGraphNode *current = openList.back();
        openList.pop_back();
        if (visitedList.count(current) == 0)
        {
            if (current == parent)
            {
                return true;
            }
            visitedList.insert(current);
            openList.insert(openList.end(), current->mParents.begin(), current->mParents.end());
        }
    }

    return false;
}

VisitedState CommandGraphNode::visitedState() const
{
    return mVisitedState;
}

void CommandGraphNode::visitParents(std::vector<CommandGraphNode *> *stack)
{
    ASSERT(mVisitedState == VisitedState::Unvisited);
    stack->insert(stack->end(), mParents.begin(), mParents.end());
    mVisitedState = VisitedState::Ready;
}

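// Ends this node's secondary command buffers and records them into the primary buffer. Render
// pass commands are wrapped in a begin/end of a compatible RenderPass pulled from the cache.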
Error CommandGraphNode::visitAndExecute(VkDevice device,
                                        Serial serial,
                                        RenderPassCache *renderPassCache,
                                        CommandBuffer *primaryCommandBuffer)
{
    if (mOutsideRenderPassCommands.valid())
    {
        mOutsideRenderPassCommands.end();
        primaryCommandBuffer->executeCommands(1, &mOutsideRenderPassCommands);
    }

    if (mInsideRenderPassCommands.valid())
    {
        // Pull a compatible RenderPass from the cache.
        // TODO(jmadill): Insert real ops and layout transitions.
        RenderPass *renderPass = nullptr;
        ANGLE_TRY(
            renderPassCache->getCompatibleRenderPass(device, serial, mRenderPassDesc, &renderPass));

        mInsideRenderPassCommands.end();

        VkRenderPassBeginInfo beginInfo;
        beginInfo.sType                    = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
        beginInfo.pNext                    = nullptr;
        beginInfo.renderPass               = renderPass->getHandle();
        beginInfo.framebuffer              = mRenderPassFramebuffer.getHandle();
        beginInfo.renderArea.offset.x      = static_cast<uint32_t>(mRenderPassRenderArea.x);
        beginInfo.renderArea.offset.y      = static_cast<uint32_t>(mRenderPassRenderArea.y);
        beginInfo.renderArea.extent.width  = static_cast<uint32_t>(mRenderPassRenderArea.width);
        beginInfo.renderArea.extent.height = static_cast<uint32_t>(mRenderPassRenderArea.height);
        beginInfo.clearValueCount          = mRenderPassDesc.attachmentCount();
        beginInfo.pClearValues             = mRenderPassClearValues.data();

        primaryCommandBuffer->beginRenderPass(beginInfo,
                                              VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
        primaryCommandBuffer->executeCommands(1, &mInsideRenderPassCommands);
        primaryCommandBuffer->endRenderPass();
    }

    mVisitedState = VisitedState::Visited;
    return NoError();
}

const gl::Rectangle &CommandGraphNode::getRenderPassRenderArea() const
{
    return mRenderPassRenderArea;
}

// CommandGraph implementation.
CommandGraph::CommandGraph()
{
}

CommandGraph::~CommandGraph()
{
    ASSERT(empty());
}

CommandGraphNode *CommandGraph::allocateNode()
{
    // TODO(jmadill): Use a pool allocator for the CPU node allocations.
    CommandGraphNode *newCommands = new CommandGraphNode();
    mNodes.emplace_back(newCommands);
    return newCommands;
}

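// Records the whole graph into 'primaryCommandBufferOut'. Nodes are visited with an iterative
// depth-first traversal that executes every parent (happens-before dependency) before the node
// itself, then the node storage is freed.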
Error CommandGraph::submitCommands(VkDevice device,
                                   Serial serial,
                                   RenderPassCache *renderPassCache,
                                   CommandPool *commandPool,
                                   CommandBuffer *primaryCommandBufferOut)
{
    VkCommandBufferAllocateInfo primaryInfo;
    primaryInfo.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    primaryInfo.pNext              = nullptr;
    primaryInfo.commandPool        = commandPool->getHandle();
    primaryInfo.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    primaryInfo.commandBufferCount = 1;

    ANGLE_TRY(primaryCommandBufferOut->init(device, primaryInfo));

    if (mNodes.empty())
    {
        return NoError();
    }

    std::vector<CommandGraphNode *> nodeStack;

    VkCommandBufferBeginInfo beginInfo;
    beginInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.pNext            = nullptr;
    beginInfo.flags            = 0;
    beginInfo.pInheritanceInfo = nullptr;

    ANGLE_TRY(primaryCommandBufferOut->begin(beginInfo));

    for (CommandGraphNode *topLevelNode : mNodes)
    {
        // Only process commands that don't have child commands. The others will be pulled in
        // automatically. Also skip commands that have already been visited.
        if (topLevelNode->hasChildren() || topLevelNode->visitedState() != VisitedState::Unvisited)
            continue;

        nodeStack.push_back(topLevelNode);

        while (!nodeStack.empty())
        {
            CommandGraphNode *node = nodeStack.back();

            switch (node->visitedState())
            {
                case VisitedState::Unvisited:
                    node->visitParents(&nodeStack);
                    break;
                case VisitedState::Ready:
                    ANGLE_TRY(node->visitAndExecute(device, serial, renderPassCache,
                                                    primaryCommandBufferOut));
                    nodeStack.pop_back();
                    break;
                case VisitedState::Visited:
                    nodeStack.pop_back();
                    break;
                default:
                    UNREACHABLE();
                    break;
            }
        }
    }

    ANGLE_TRY(primaryCommandBufferOut->end());

    // TODO(jmadill): Use pool allocation so we don't need to deallocate command graph.
    for (CommandGraphNode *node : mNodes)
    {
        delete node;
    }
    mNodes.clear();

    return NoError();
}

bool CommandGraph::empty() const
{
    return mNodes.empty();
}

}  // namespace vk
}  // namespace rx