/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkResource_DEFINED
#define GrVkResource_DEFINED

#include "GrVkVulkan.h"

#include "SkRandom.h"
#include "SkTHash.h"
#include <atomic>

class GrVkGpu;

// Define SK_TRACE_VK_RESOURCES to enable tracing of resource refs; it is on
// automatically in debug builds.
#ifdef SK_DEBUG
#define SK_TRACE_VK_RESOURCES
#endif

/** \class GrVkResource

  GrVkResource is the base class for Vulkan resources that may be shared by multiple
  objects. When an existing owner wants to share a reference, it calls ref().
  When an owner wants to release its reference, it calls unref(). When the
  shared object's reference count goes to zero as the result of an unref()
  call, its (virtual) destructor is called. It is an error for the
  destructor to be called explicitly (or via the object going out of scope on
  the stack or calling delete) if getRefCnt() > 1.

  This is nearly identical to SkRefCntBase. The exceptions are that unref()
  takes a GrVkGpu, and any derived classes must implement freeGPUData() and
  possibly abandonGPUData().
*/
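
/*
    A minimal lifecycle sketch. GrVkSomeResource is a hypothetical concrete
    subclass and gpu an already-created GrVkGpu*; neither is part of this file:

        const GrVkSomeResource* res = new GrVkSomeResource();  // refCnt == 1
        res->ref();                                            // second owner: refCnt == 2
        res->unref(gpu);                                       // refCnt == 1
        res->unref(gpu);                                       // refCnt == 0: freeGPUData(gpu), then delete
*/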

class GrVkResource : SkNoncopyable {
public:
    // Simple refCount tracing, to ensure that everything ref'ed is unref'ed.
#ifdef SK_TRACE_VK_RESOURCES
    struct Hash {
        uint32_t operator()(const GrVkResource* const& r) const {
            SkASSERT(r);
            return r->fKey;
        }
    };

    class Trace {
    public:
        ~Trace() {
            fHashSet.foreach([](const GrVkResource* r) {
                r->dumpInfo();
            });
            SkASSERT(0 == fHashSet.count());
        }
        void add(const GrVkResource* r) { fHashSet.add(r); }
        void remove(const GrVkResource* r) { fHashSet.remove(r); }

    private:
        SkTHashSet<const GrVkResource*, GrVkResource::Hash> fHashSet;
    };

    static std::atomic<uint32_t> fKeyCounter;
#endif

    /** Default construct, initializing the reference count to 1.
     */
    GrVkResource() : fRefCnt(1) {
#ifdef SK_TRACE_VK_RESOURCES
        fKey = fKeyCounter.fetch_add(+1, std::memory_order_relaxed);
        GetTrace()->add(this);
#endif
    }

    /** Destruct, asserting that the reference count is 1.
     */
    virtual ~GrVkResource() {
#ifdef SK_DEBUG
        auto count = this->getRefCnt();
        SkASSERTF(count == 1, "fRefCnt was %d", count);
        fRefCnt.store(0);  // illegal value, to catch us if we reuse after delete
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const { return fRefCnt.load(); }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owners' actions are complete.
     */
    bool unique() const {
        // The acquire barrier is only really needed if we return true.  It
        // prevents code conditioned on the result of unique() from running
        // until previous owners are all totally done calling unref().
        return 1 == fRefCnt.load(std::memory_order_acquire);
    }

    /** Increment the reference count.
        Must be balanced by a call to unref() or unrefAndAbandon().
    */
    void ref() const {
        SkASSERT(this->getRefCnt() > 0);
        (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);  // No barrier required.
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
        Any GPU data associated with this resource will be freed before it's deleted.
    */
    void unref(const GrVkGpu* gpu) const {
        SkASSERT(this->getRefCnt() > 0);
        SkASSERT(gpu);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose(gpu);
        }
    }

    /** Unref without freeing GPU data. Used only when we're abandoning the resource. */
    void unrefAndAbandon() const {
        SkASSERT(this->getRefCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

#ifdef SK_DEBUG
    void validate() const {
        SkASSERT(this->getRefCnt() > 0);
    }
#endif

#ifdef SK_TRACE_VK_RESOURCES
    /** Output a human-readable dump of this resource's information
     */
    virtual void dumpInfo() const = 0;
#endif

private:
#ifdef SK_TRACE_VK_RESOURCES
    static Trace* GetTrace() {
        static Trace kTrace;
        return &kTrace;
    }
#endif

    /** Must be implemented by any subclasses.
     *  Deletes any Vk data associated with this resource.
     *  (A sketch of a concrete override appears in a comment after this class.)
     */
    virtual void freeGPUData(const GrVkGpu* gpu) const = 0;

    /**
     * Called from unrefAndAbandon. Resources should do any necessary cleanup without freeing
     * underlying Vk objects. This must be overridden by subclasses that themselves store
     * GrVkResources, since those resources will need to be unreffed.
     */
    virtual void abandonGPUData() const {}

    /**
     * Called when the ref count goes to 0. Will free Vk resources.
     */
    void internal_dispose(const GrVkGpu* gpu) const {
        this->freeGPUData(gpu);
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif

#ifdef SK_DEBUG
        SkASSERT(0 == this->getRefCnt());
        fRefCnt.store(1);
#endif
        delete this;
    }

    /**
     * Internal_dispose without freeing Vk resources. Used when we've lost context.
     */
    void internal_dispose() const {
        this->abandonGPUData();
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif

#ifdef SK_DEBUG
        SkASSERT(0 == this->getRefCnt());
        fRefCnt.store(1);
#endif
        delete this;
    }

    mutable std::atomic<int32_t> fRefCnt;
#ifdef SK_TRACE_VK_RESOURCES
    uint32_t fKey;
#endif

    typedef SkNoncopyable INHERITED;
};
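
/*
    For reference, a sketch of what a concrete subclass can look like. The class
    name GrVkExampleSampler and its fSampler member are illustrative assumptions,
    not part of this header; GR_VK_CALL and GrVkGpu's vkInterface()/device()
    accessors are assumed from the surrounding Vulkan backend:

        class GrVkExampleSampler : public GrVkResource {
        public:
            GrVkExampleSampler(VkSampler sampler) : fSampler(sampler) {}

        #ifdef SK_TRACE_VK_RESOURCES
            void dumpInfo() const override {
                SkDebugf("GrVkExampleSampler: %d refs\n", this->getRefCnt());
            }
        #endif

        private:
            void freeGPUData(const GrVkGpu* gpu) const override {
                // The device is still alive here, so destroy the Vulkan object.
                GR_VK_CALL(gpu->vkInterface(),
                           DestroySampler(gpu->device(), fSampler, nullptr));
            }
            // abandonGPUData() keeps its default no-op body: this sketch holds
            // no nested GrVkResources that would need to be unreffed.

            VkSampler fSampler;
        };
*/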

// This subclass allows for recycling; a usage sketch follows the class.
class GrVkRecycledResource : public GrVkResource {
public:
    // When recycle is called and there is only one ref left on the resource, we will signal that
    // the resource can be recycled for reuse. If the subclass (or whoever is managing this
    // resource) decides not to recycle the object, it is their responsibility to call unref on
    // the object.
    void recycle(GrVkGpu* gpu) const {
        if (this->unique()) {
            this->onRecycle(gpu);
        } else {
            this->unref(gpu);
        }
    }

private:
    virtual void onRecycle(GrVkGpu* gpu) const = 0;
};
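
/*
    A sketch of the recycling contract under an assumed free-list owner. The
    class GrVkExampleCommandPool and the HandPoolBackToOwner() hand-off are
    hypothetical, not part of this header:

        class GrVkExampleCommandPool : public GrVkRecycledResource {
        private:
            void onRecycle(GrVkGpu* gpu) const override {
                // We hold the sole remaining ref, so hand it to the owner's
                // free list for reuse instead of destroying the Vulkan object.
                // (Hypothetical hook; a real owner might be a resource
                // provider that stashes this pool for the next frame.)
                HandPoolBackToOwner(gpu, this);
            }
        };

    Callers release with recycle(gpu) instead of unref(gpu): while other owners
    remain, recycle() degenerates to a plain unref(); only the last owner's call
    triggers onRecycle(), which takes over that final ref.
*/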

#endif