/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkCommandPool.h"

#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkGpu.h"

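// Factory for GrVkCommandPool: creates a transient VkCommandPool on the gpu's queue family
// (adding the protected bit for protected contexts) and then allocates the pool's single primary
// command buffer. Returns nullptr if either step fails, destroying the VkCommandPool on the
// partial-failure path.
//
// Minimal usage sketch (hypothetical standalone caller; within Skia the pool is normally obtained
// through the GrVkGpu / resource provider machinery rather than created directly):
//
//     GrVkCommandPool* pool = GrVkCommandPool::Create(gpu);
//     if (!pool) {
//         // allocation failed or the device was lost
//     }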
GrVkCommandPool* GrVkCommandPool::Create(GrVkGpu* gpu) {
    VkCommandPoolCreateFlags cmdPoolCreateFlags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    if (gpu->protectedContext()) {
        cmdPoolCreateFlags |= VK_COMMAND_POOL_CREATE_PROTECTED_BIT;
    }

    const VkCommandPoolCreateInfo cmdPoolInfo = {
        VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,  // sType
        nullptr,                                     // pNext
        cmdPoolCreateFlags,                          // CmdPoolCreateFlags
        gpu->queueIndex(),                           // queueFamilyIndex
    };
    VkResult result;
    VkCommandPool pool;
    GR_VK_CALL_RESULT(gpu, result, CreateCommandPool(gpu->device(), &cmdPoolInfo, nullptr, &pool));
    if (result != VK_SUCCESS) {
        return nullptr;
    }

    GrVkPrimaryCommandBuffer* primaryCmdBuffer = GrVkPrimaryCommandBuffer::Create(gpu, pool);
    if (!primaryCmdBuffer) {
        GR_VK_CALL(gpu->vkInterface(), DestroyCommandPool(gpu->device(), pool, nullptr));
        return nullptr;
    }

    return new GrVkCommandPool(gpu, pool, primaryCmdBuffer);
}

GrVkCommandPool::GrVkCommandPool(GrVkGpu* gpu, VkCommandPool commandPool,
                                 GrVkPrimaryCommandBuffer* primaryCmdBuffer)
        : GrVkManagedResource(gpu)
        , fCommandPool(commandPool)
        , fPrimaryCommandBuffer(primaryCmdBuffer)
        , fMaxCachedSecondaryCommandBuffers(
                gpu->vkCaps().maxPerPoolCachedSecondaryCommandBuffers()) {
}

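// Hands out a secondary command buffer for recording, preferring a previously recycled buffer
// from fAvailableSecondaryBuffers and only allocating a new one from this pool when the free
// list is empty. The returned pointer may be null if that allocation fails.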
std::unique_ptr<GrVkSecondaryCommandBuffer> GrVkCommandPool::findOrCreateSecondaryCommandBuffer(
        GrVkGpu* gpu) {
    std::unique_ptr<GrVkSecondaryCommandBuffer> result;
    if (fAvailableSecondaryBuffers.count()) {
        result = std::move(fAvailableSecondaryBuffers.back());
        fAvailableSecondaryBuffers.pop_back();
    } else {
        result.reset(GrVkSecondaryCommandBuffer::Create(gpu, this));
    }
    return result;
}

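// Takes back ownership of a secondary command buffer once the primary buffer is done with it.
// The buffer is cached on fAvailableSecondaryBuffers for reuse unless the cache has already
// reached fMaxCachedSecondaryCommandBuffers, in which case the underlying VkCommandBuffer is
// freed back to the VkCommandPool immediately.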
void GrVkCommandPool::recycleSecondaryCommandBuffer(GrVkSecondaryCommandBuffer* buffer) {
    std::unique_ptr<GrVkSecondaryCommandBuffer> scb(buffer);
    if (fAvailableSecondaryBuffers.count() < fMaxCachedSecondaryCommandBuffers) {
        fAvailableSecondaryBuffers.push_back(std::move(scb));
    } else {
        VkCommandBuffer vkBuffer = buffer->vkCommandBuffer();
        GR_VK_CALL(fGpu->vkInterface(),
                   FreeCommandBuffers(fGpu->device(), fCommandPool, 1, &vkBuffer));
    }
}

void GrVkCommandPool::close() {
    fOpen = false;
}

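// Reopens the pool for a new round of recording. vkResetCommandPool returns every command buffer
// allocated from this pool to its initial state, so the pool is expected to have been closed
// (no longer recording) before reset is called, as the assert below checks.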
void GrVkCommandPool::reset(GrVkGpu* gpu) {
    SkASSERT(!fOpen);
    fOpen = true;
    // We can't use the normal result macro calls here because we may call reset on a different
    // thread and we can't be modifying the lost state on the GrVkGpu. We just call
    // vkResetCommandPool and assume the "next" vulkan call will catch the lost device.
    SkDEBUGCODE(VkResult result = )GR_VK_CALL(gpu->vkInterface(),
                                              ResetCommandPool(gpu->device(), fCommandPool, 0));
    SkASSERT(result == VK_SUCCESS || result == VK_ERROR_DEVICE_LOST);
}

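// Releases the resources tracked by the primary command buffer and returns its secondary command
// buffers to this pool's free list so they can be reused after the next reset.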
void GrVkCommandPool::releaseResources() {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fOpen);
    fPrimaryCommandBuffer->releaseResources();
    fPrimaryCommandBuffer->recycleSecondaryCommandBuffers(this);
}

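// Final teardown: closes the pool and releases its resources, frees the primary and any cached
// secondary command buffers, and then destroys the VkCommandPool itself.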
void GrVkCommandPool::freeGPUData() const {
    // TODO: having freeGPUData virtual on GrManagedResource be const seems like a bad restriction
    // since we are changing the internal objects of these classes when it is called. We should go
    // back and revisit how much of a headache it would be to make this function non-const.
    GrVkCommandPool* nonConstThis = const_cast<GrVkCommandPool*>(this);
    nonConstThis->close();
    nonConstThis->releaseResources();
    fPrimaryCommandBuffer->freeGPUData(fGpu, fCommandPool);
    for (const auto& buffer : fAvailableSecondaryBuffers) {
        buffer->freeGPUData(fGpu, fCommandPool);
    }
    if (fCommandPool != VK_NULL_HANDLE) {
        GR_VK_CALL(fGpu->vkInterface(),
                   DestroyCommandPool(fGpu->device(), fCommandPool, nullptr));
    }
}