blob: 7b9949ba1b00e14dc2f55a7b447c93c26257cdeb [file] [log] [blame]
/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#ifndef GrVkResource_DEFINED
9#define GrVkResource_DEFINED
10
Greg Daniel54bfb182018-11-20 17:12:36 -050011
Mike Kleinc0bd9f92019-04-23 12:05:21 -050012#include "include/private/SkTHash.h"
13#include "include/utils/SkRandom.h"
Mike Klein820e79b2018-12-04 09:31:31 -050014#include <atomic>
Greg Daniel164a9f02016-02-22 09:56:40 -050015
16class GrVkGpu;
17
18// uncomment to enable tracing of resource refs
jvanverth7ec92412016-07-06 09:24:57 -070019#ifdef SK_DEBUG
20#define SK_TRACE_VK_RESOURCES
21#endif
Greg Daniel164a9f02016-02-22 09:56:40 -050022
/** \class GrVkResource

    GrVkResource is the base class for Vulkan resources that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.

    This is nearly identical to SkRefCntBase. The exceptions are that unref()
    takes a GrVkGpu, and any derived classes must implement freeGPUData() and
    possibly abandonGPUData().
*/
37
38class GrVkResource : SkNoncopyable {
39public:
40 // Simple refCount tracing, to ensure that everything ref'ed is unref'ed.
41#ifdef SK_TRACE_VK_RESOURCES
egdaniel00db3fd2016-07-29 08:55:53 -070042 struct Hash {
43 uint32_t operator()(const GrVkResource* const& r) const {
44 SkASSERT(r);
45 return r->fKey;
46 }
47 };
jvanverthd5f6e9a2016-07-07 08:21:48 -070048
49 class Trace {
50 public:
51 ~Trace() {
egdaniel00db3fd2016-07-29 08:55:53 -070052 fHashSet.foreach([](const GrVkResource* r) {
53 r->dumpInfo();
54 });
55 SkASSERT(0 == fHashSet.count());
jvanverthd5f6e9a2016-07-07 08:21:48 -070056 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -050057
58 void add(const GrVkResource* r) {
59 fHashSet.add(r);
60 }
61
62 void remove(const GrVkResource* r) {
63 fHashSet.remove(r);
64 }
jvanverthd5f6e9a2016-07-07 08:21:48 -070065
66 private:
egdaniel00db3fd2016-07-29 08:55:53 -070067 SkTHashSet<const GrVkResource*, GrVkResource::Hash> fHashSet;
jvanverthd5f6e9a2016-07-07 08:21:48 -070068 };
jvanverthd5f6e9a2016-07-07 08:21:48 -070069
Mike Klein820e79b2018-12-04 09:31:31 -050070 static std::atomic<uint32_t> fKeyCounter;
Greg Daniel164a9f02016-02-22 09:56:40 -050071#endif
72
73 /** Default construct, initializing the reference count to 1.
74 */
75 GrVkResource() : fRefCnt(1) {
76#ifdef SK_TRACE_VK_RESOURCES
Mike Klein820e79b2018-12-04 09:31:31 -050077 fKey = fKeyCounter.fetch_add(+1, std::memory_order_relaxed);
Greg Daniel21580ba2018-06-26 11:26:44 -040078 GetTrace()->add(this);
Greg Daniel164a9f02016-02-22 09:56:40 -050079#endif
80 }
81
82 /** Destruct, asserting that the reference count is 1.
83 */
84 virtual ~GrVkResource() {
85#ifdef SK_DEBUG
Mike Klein820e79b2018-12-04 09:31:31 -050086 auto count = this->getRefCnt();
87 SkASSERTF(count == 1, "fRefCnt was %d", count);
88 fRefCnt.store(0); // illegal value, to catch us if we reuse after delete
Greg Daniel164a9f02016-02-22 09:56:40 -050089#endif
90 }
91
92#ifdef SK_DEBUG
93 /** Return the reference count. Use only for debugging. */
Mike Klein820e79b2018-12-04 09:31:31 -050094 int32_t getRefCnt() const { return fRefCnt.load(); }
Greg Daniel164a9f02016-02-22 09:56:40 -050095#endif
96
97 /** May return true if the caller is the only owner.
98 * Ensures that all previous owner's actions are complete.
99 */
100 bool unique() const {
Mike Klein820e79b2018-12-04 09:31:31 -0500101 // The acquire barrier is only really needed if we return true. It
102 // prevents code conditioned on the result of unique() from running
103 // until previous owners are all totally done calling unref().
104 return 1 == fRefCnt.load(std::memory_order_acquire);
Greg Daniel164a9f02016-02-22 09:56:40 -0500105 }
106
halcanary9d524f22016-03-29 09:03:52 -0700107 /** Increment the reference count.
Greg Daniel164a9f02016-02-22 09:56:40 -0500108 Must be balanced by a call to unref() or unrefAndFreeResources().
109 */
110 void ref() const {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500111 // No barrier required.
112 SkDEBUGCODE(int newRefCount = )fRefCnt.fetch_add(+1, std::memory_order_relaxed);
113 SkASSERT(newRefCount >= 1);
Greg Daniel164a9f02016-02-22 09:56:40 -0500114 }
115
116 /** Decrement the reference count. If the reference count is 1 before the
117 decrement, then delete the object. Note that if this is the case, then
118 the object needs to have been allocated via new, and not on the stack.
119 Any GPU data associated with this resource will be freed before it's deleted.
120 */
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500121 void unref(GrVkGpu* gpu) const {
Greg Daniel164a9f02016-02-22 09:56:40 -0500122 SkASSERT(gpu);
123 // A release here acts in place of all releases we "should" have been doing in ref().
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500124 int newRefCount = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);
125 SkASSERT(newRefCount >= 0);
126 if (newRefCount == 1) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500127 // Like unique(), the acquire is only needed on success, to make sure
128 // code in internal_dispose() doesn't happen before the decrement.
129 this->internal_dispose(gpu);
130 }
131 }
132
133 /** Unref without freeing GPU data. Used only when we're abandoning the resource */
134 void unrefAndAbandon() const {
Mike Klein820e79b2018-12-04 09:31:31 -0500135 SkASSERT(this->getRefCnt() > 0);
Greg Daniel164a9f02016-02-22 09:56:40 -0500136 // A release here acts in place of all releases we "should" have been doing in ref().
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500137 int newRefCount = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);
138 SkASSERT(newRefCount >= 0);
139 if (newRefCount == 1) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500140 // Like unique(), the acquire is only needed on success, to make sure
141 // code in internal_dispose() doesn't happen before the decrement.
142 this->internal_dispose();
143 }
144 }
145
Brian Salomon614c1a82018-12-19 15:42:06 -0500146 // Called every time this resource is added to a command buffer.
147 virtual void notifyAddedToCommandBuffer() const {}
148 // Called every time this resource is removed from a command buffer (typically because
149 // the command buffer finished execution on the GPU but also when the command buffer
150 // is abandoned.)
151 virtual void notifyRemovedFromCommandBuffer() const {}
152
Greg Daniel164a9f02016-02-22 09:56:40 -0500153#ifdef SK_DEBUG
154 void validate() const {
Mike Klein820e79b2018-12-04 09:31:31 -0500155 SkASSERT(this->getRefCnt() > 0);
Greg Daniel164a9f02016-02-22 09:56:40 -0500156 }
157#endif
158
jvanverth7ec92412016-07-06 09:24:57 -0700159#ifdef SK_TRACE_VK_RESOURCES
160 /** Output a human-readable dump of this resource's information
161 */
162 virtual void dumpInfo() const = 0;
163#endif
164
Greg Daniel164a9f02016-02-22 09:56:40 -0500165private:
Greg Daniel21580ba2018-06-26 11:26:44 -0400166#ifdef SK_TRACE_VK_RESOURCES
167 static Trace* GetTrace() {
168 static Trace kTrace;
169 return &kTrace;
170 }
171#endif
172
halcanary9d524f22016-03-29 09:03:52 -0700173 /** Must be implemented by any subclasses.
174 * Deletes any Vk data associated with this resource
Greg Daniel164a9f02016-02-22 09:56:40 -0500175 */
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500176 virtual void freeGPUData(GrVkGpu* gpu) const = 0;
Greg Daniel164a9f02016-02-22 09:56:40 -0500177
Greg Danielcef213c2017-04-21 11:52:27 -0400178 /**
179 * Called from unrefAndAbandon. Resources should do any necessary cleanup without freeing
180 * underlying Vk objects. This must be overridden by subclasses that themselves store
181 * GrVkResources since those resource will need to be unrefed.
Greg Daniel164a9f02016-02-22 09:56:40 -0500182 */
Greg Danielcef213c2017-04-21 11:52:27 -0400183 virtual void abandonGPUData() const {}
Greg Daniel164a9f02016-02-22 09:56:40 -0500184
185 /**
186 * Called when the ref count goes to 0. Will free Vk resources.
187 */
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500188 void internal_dispose(GrVkGpu* gpu) const {
Greg Daniel164a9f02016-02-22 09:56:40 -0500189 this->freeGPUData(gpu);
190#ifdef SK_TRACE_VK_RESOURCES
Greg Daniel21580ba2018-06-26 11:26:44 -0400191 GetTrace()->remove(this);
Greg Daniel164a9f02016-02-22 09:56:40 -0500192#endif
Mike Klein820e79b2018-12-04 09:31:31 -0500193
194#ifdef SK_DEBUG
195 SkASSERT(0 == this->getRefCnt());
196 fRefCnt.store(1);
197#endif
Greg Daniel164a9f02016-02-22 09:56:40 -0500198 delete this;
199 }
200
201 /**
202 * Internal_dispose without freeing Vk resources. Used when we've lost context.
203 */
204 void internal_dispose() const {
Greg Danielcef213c2017-04-21 11:52:27 -0400205 this->abandonGPUData();
Greg Daniel164a9f02016-02-22 09:56:40 -0500206#ifdef SK_TRACE_VK_RESOURCES
Greg Daniel21580ba2018-06-26 11:26:44 -0400207 GetTrace()->remove(this);
Greg Daniel164a9f02016-02-22 09:56:40 -0500208#endif
Mike Klein820e79b2018-12-04 09:31:31 -0500209
210#ifdef SK_DEBUG
211 SkASSERT(0 == this->getRefCnt());
212 fRefCnt.store(1);
213#endif
Greg Daniel164a9f02016-02-22 09:56:40 -0500214 delete this;
215 }
216
Mike Klein820e79b2018-12-04 09:31:31 -0500217 mutable std::atomic<int32_t> fRefCnt;
Greg Daniel164a9f02016-02-22 09:56:40 -0500218#ifdef SK_TRACE_VK_RESOURCES
219 uint32_t fKey;
220#endif
221
222 typedef SkNoncopyable INHERITED;
223};
224
egdanielc1be9bc2016-07-20 08:33:00 -0700225// This subclass allows for recycling
226class GrVkRecycledResource : public GrVkResource {
227public:
228 // When recycle is called and there is only one ref left on the resource, we will signal that
egdaniela95220d2016-07-21 11:50:37 -0700229 // the resource can be recycled for reuse. If the sublass (or whoever is managing this resource)
230 // decides not to recycle the objects, it is their responsibility to call unref on the object.
egdanielc1be9bc2016-07-20 08:33:00 -0700231 void recycle(GrVkGpu* gpu) const {
232 if (this->unique()) {
233 this->onRecycle(gpu);
egdaniela95220d2016-07-21 11:50:37 -0700234 } else {
235 this->unref(gpu);
egdanielc1be9bc2016-07-20 08:33:00 -0700236 }
egdanielc1be9bc2016-07-20 08:33:00 -0700237 }
238
239private:
240 virtual void onRecycle(GrVkGpu* gpu) const = 0;
241};
Greg Daniel164a9f02016-02-22 09:56:40 -0500242
243#endif