Timothy Liang | 49528b6 | 2018-08-02 14:18:37 -0400 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2018 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "GrMtlBuffer.h" |
| 9 | #include "GrMtlGpu.h" |
| 10 | #include "GrGpuResourcePriv.h" |
| 11 | #include "GrTypesPriv.h" |
| 12 | |
| 13 | #ifdef SK_DEBUG |
| 14 | #define VALIDATE() this->validate() |
| 15 | #else |
| 16 | #define VALIDATE() do {} while(false) |
| 17 | #endif |
| 18 | |
/**
 * Factory for GrMtlBuffer. Constructs the buffer and, when initial contents are
 * supplied, uploads them immediately. Returns nullptr if that upload fails.
 */
sk_sp<GrMtlBuffer> GrMtlBuffer::Make(GrMtlGpu* gpu, size_t size, GrGpuBufferType intendedType,
                                     GrAccessPattern accessPattern, const void* data) {
    // Build the buffer first; any initial contents are uploaded as a second step.
    sk_sp<GrMtlBuffer> result(new GrMtlBuffer(gpu, size, intendedType, accessPattern));
    if (nullptr == data) {
        return result;
    }
    // A failed initial upload invalidates the whole creation.
    if (!result->onUpdateData(data, size)) {
        return nullptr;
    }
    return result;
}
| 27 | |
/**
 * Allocates the backing MTLBuffer. A zero-sized request allocates no Metal buffer
 * at all (fMtlBuffer stays nil). The resource registers itself with the cache as
 * budgeted before returning.
 */
GrMtlBuffer::GrMtlBuffer(GrMtlGpu* gpu, size_t size, GrGpuBufferType intendedType,
                         GrAccessPattern accessPattern)
        : INHERITED(gpu, size, intendedType, accessPattern)
        , fIsDynamic(accessPattern == kDynamic_GrAccessPattern) {
    // TODO: We are treating all buffers as static access since we don't have an implementation to
    // synchronize gpu and cpu access of a resource yet. See comments in GrMtlBuffer::internalMap()
    // and internalUnmap() for more details.
    fIsDynamic = false;

    // The managed resource mode is only available for macOS. iOS should use shared.
    // Because fIsDynamic is forced to false above, the storage mode is currently always
    // Private; the Managed/Shared branch is kept for when dynamic access is enabled.
    fMtlBuffer = size == 0 ? nil :
            [gpu->device() newBufferWithLength: size
                                       options: !fIsDynamic ? MTLResourceStorageModePrivate
#ifdef SK_BUILD_FOR_MAC
                                              : MTLResourceStorageModeManaged];
#else
                                              : MTLResourceStorageModeShared];
#endif
    this->registerWithCache(SkBudgeted::kYes);
    VALIDATE();
}
| 49 | |
// By destruction time onRelease()/onAbandon() must already have dropped every Metal
// reference; the destructor only verifies that invariant in debug builds.
GrMtlBuffer::~GrMtlBuffer() {
    SkASSERT(fMtlBuffer == nil);
    SkASSERT(fMappedBuffer == nil);
    SkASSERT(fMapPtr == nullptr);
}
| 55 | |
/**
 * Uploads srcInBytes bytes from src into this buffer by staging them through a
 * CPU-visible mapping. Returns false if there is no backing MTLBuffer, if the data
 * would not fit, or if mapping fails.
 */
bool GrMtlBuffer::onUpdateData(const void* src, size_t srcInBytes) {
    // Reject the update when there is no Metal buffer or the data wouldn't fit in it.
    if (fMtlBuffer == nil || srcInBytes > fMtlBuffer.length) {
        return false;
    }
    VALIDATE();

    // Stage the bytes via a mapping sized exactly to this update, then flush it back.
    this->internalMap(srcInBytes);
    if (fMapPtr == nil) {
        return false;
    }
    SkASSERT(fMappedBuffer);
    SkASSERT(srcInBytes == fMappedBuffer.length);
    memcpy(fMapPtr, src, srcInBytes);
    this->internalUnmap(srcInBytes);

    VALIDATE();
    return true;
}
| 77 | |
// Returns the owning gpu as its concrete Metal type. The downcast is safe because a
// GrMtlBuffer is only ever created with a GrMtlGpu.
inline GrMtlGpu* GrMtlBuffer::mtlGpu() const {
    SkASSERT(!this->wasDestroyed());
    GrGpu* genericGpu = this->getGpu();
    return static_cast<GrMtlGpu*>(genericGpu);
}
| 82 | |
// Drops all Metal object references when the owning context has been abandoned.
// Under ARC, nil-ing the ivars releases the MTLBuffer objects.
void GrMtlBuffer::onAbandon() {
    fMapPtr = nullptr;
    fMappedBuffer = nil;
    fMtlBuffer = nil;
    VALIDATE();
    INHERITED::onAbandon();
}
| 90 | |
// Releases the Metal object references. Skips the cleanup (other than notifying the
// base class) when the resource was already destroyed via abandonment.
void GrMtlBuffer::onRelease() {
    if (!this->wasDestroyed()) {
        VALIDATE();
        fMapPtr = nullptr;
        fMappedBuffer = nil;
        fMtlBuffer = nil;
        VALIDATE();
    }
    INHERITED::onRelease();
}
| 101 | |
// Maps sizeInBytes bytes of this buffer for CPU writes, setting fMapPtr/fMappedBuffer.
// For static-access buffers a fresh CPU-visible staging buffer is allocated; its contents
// are blitted into fMtlBuffer later by internalUnmap(). For dynamic buffers fMtlBuffer
// would be exposed directly (currently unreachable: fIsDynamic is forced false in the ctor).
void GrMtlBuffer::internalMap(size_t sizeInBytes) {
    SkASSERT(fMtlBuffer);
    // No-op once the owning context has gone away.
    if (this->wasDestroyed()) {
        return;
    }
    VALIDATE();
    SkASSERT(!this->isMapped());
    if (fIsDynamic) {
        // TODO: We will want to decide if we need to create a new buffer here in order to avoid
        // possibly invalidating a buffer which is being used by the gpu.
        fMappedBuffer = fMtlBuffer;
        fMapPtr = fMappedBuffer.contents;
    } else {
        // TODO: We can't ensure that map will only be called once on static access buffers until
        // we actually enable dynamic access.
        // SkASSERT(fMappedBuffer == nil);
        // Managed storage is macOS-only; iOS uses shared storage for the staging buffer.
        fMappedBuffer =
                [this->mtlGpu()->device() newBufferWithLength: sizeInBytes
#ifdef SK_BUILD_FOR_MAC
                                                      options: MTLResourceStorageModeManaged];
#else
                                                      options: MTLResourceStorageModeShared];
#endif
        fMapPtr = fMappedBuffer.contents;
    }
    VALIDATE();
}
| 129 | |
// Completes a map begun by internalMap(). For static-access buffers the first
// sizeInBytes bytes of the staging buffer are blitted into fMtlBuffer; in all cases the
// mapped state (fMappedBuffer/fMapPtr) is cleared.
void GrMtlBuffer::internalUnmap(size_t sizeInBytes) {
    SkASSERT(fMtlBuffer);
    if (this->wasDestroyed()) {
        return;
    }
    VALIDATE();
    SkASSERT(this->isMapped());
    // Defensive release-mode guard; the SkASSERT above already covers debug builds.
    if (fMtlBuffer == nil) {
        fMappedBuffer = nil;
        fMapPtr = nullptr;
        return;
    }
#ifdef SK_BUILD_FOR_MAC
    // Flush the CPU writes of the (managed-storage) mapped buffer so the GPU sees them.
    // TODO: by calling didModifyRange here we invalidate the buffer. This will cause problems for
    // dynamic access buffers if they are being used by the gpu.
    [fMappedBuffer didModifyRange: NSMakeRange(0, sizeInBytes)];
#endif
    if (!fIsDynamic) {
        // Copy the staging buffer's contents into the private, GPU-only fMtlBuffer.
        id<MTLBlitCommandEncoder> blitCmdEncoder =
                [this->mtlGpu()->commandBuffer() blitCommandEncoder];
        [blitCmdEncoder copyFromBuffer: fMappedBuffer
                          sourceOffset: 0
                              toBuffer: fMtlBuffer
                     destinationOffset: 0
                                  size: sizeInBytes];
        [blitCmdEncoder endEncoding];
    }
    fMappedBuffer = nil;
    fMapPtr = nullptr;
}
| 160 | |
// Maps the entire buffer for CPU access (messaging a nil fMtlBuffer yields length 0).
void GrMtlBuffer::onMap() {
    this->internalMap(fMtlBuffer.length);
}
| 164 | |
// Unmaps the currently mapped range. fMappedBuffer.length is the size that was mapped;
// if nothing is mapped, messaging nil yields 0.
void GrMtlBuffer::onUnmap() {
    this->internalUnmap(fMappedBuffer.length);
}
| 168 | |
#ifdef SK_DEBUG
// Debug-only invariant checks, invoked through the VALIDATE() macro (a no-op in release).
void GrMtlBuffer::validate() const {
    // NOTE(review): this assert lists what appear to be all GrGpuBufferType values, so it
    // only fires if a new buffer type is added without updating it — confirm that's intended.
    SkASSERT(fMtlBuffer == nil ||
             this->intendedType() == GrGpuBufferType::kVertex ||
             this->intendedType() == GrGpuBufferType::kIndex ||
             this->intendedType() == GrGpuBufferType::kXferCpuToGpu ||
             this->intendedType() == GrGpuBufferType::kXferGpuToCpu);
    // A mapped staging buffer must never be larger than the buffer it backs.
    SkASSERT(fMappedBuffer == nil || fMtlBuffer == nil ||
             fMappedBuffer.length <= fMtlBuffer.length);
    SkASSERT(fIsDynamic == false); // TODO: implement synchronization to allow dynamic access.
}
#endif