/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkResource_DEFINED
#define GrVkResource_DEFINED

#include "SkAtomics.h"
#include "SkRandom.h"
#include "SkTHash.h"

class GrVkGpu;

// uncomment to enable tracing of resource refs
#ifdef SK_DEBUG
#define SK_TRACE_VK_RESOURCES
#endif

/** \class GrVkResource

  GrVkResource is the base class for Vulkan resources that may be shared by multiple
  objects. When an existing owner wants to share a reference, it calls ref().
  When an owner wants to release its reference, it calls unref(). When the
  shared object's reference count goes to zero as the result of an unref()
  call, its (virtual) destructor is called. It is an error for the
  destructor to be called explicitly (or via the object going out of scope on
  the stack or calling delete) if getRefCnt() > 1.

  This is nearly identical to SkRefCntBase. The exceptions are that unref()
  takes a GrVkGpu, and any derived classes must implement freeGPUData() and
  possibly abandonGPUData().
*/

class GrVkResource : SkNoncopyable {
public:
    // Simple refCount tracing, to ensure that everything ref'ed is unref'ed.
#ifdef SK_TRACE_VK_RESOURCES
    struct Hash {
        uint32_t operator()(const GrVkResource* const& r) const {
            SkASSERT(r);
            return r->fKey;
        }
    };

    class Trace {
    public:
        ~Trace() {
            fHashSet.foreach([](const GrVkResource* r) {
                r->dumpInfo();
            });
            SkASSERT(0 == fHashSet.count());
        }
        void add(const GrVkResource* r) { fHashSet.add(r); }
        void remove(const GrVkResource* r) { fHashSet.remove(r); }

    private:
        SkTHashSet<const GrVkResource*, GrVkResource::Hash> fHashSet;
    };

    static uint32_t fKeyCounter;
#endif

    /** Default construct, initializing the reference count to 1.
     */
    GrVkResource() : fRefCnt(1) {
#ifdef SK_TRACE_VK_RESOURCES
        fKey = sk_atomic_fetch_add(&fKeyCounter, 1u, sk_memory_order_relaxed);
        GetTrace()->add(this);
#endif
    }

    /** Destruct, asserting that the reference count is 1.
     */
    virtual ~GrVkResource() {
#ifdef SK_DEBUG
        SkASSERTF(fRefCnt == 1, "fRefCnt was %d", fRefCnt);
        fRefCnt = 0;    // illegal value, to catch us if we reuse after delete
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const { return fRefCnt; }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owners' actions are complete.
     */
    bool unique() const {
        if (1 == sk_atomic_load(&fRefCnt, sk_memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true. It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }

    /** Increment the reference count.
        Must be balanced by a call to unref() or unrefAndFreeResources().
     */
    void ref() const {
        SkASSERT(fRefCnt > 0);
        (void)sk_atomic_fetch_add(&fRefCnt, +1, sk_memory_order_relaxed);  // No barrier required.
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
        Any GPU data associated with this resource will be freed before it's deleted.
     */
    void unref(const GrVkGpu* gpu) const {
        SkASSERT(fRefCnt > 0);
        SkASSERT(gpu);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose(gpu);
        }
    }

    /** Unref without freeing GPU data. Used only when we're abandoning the resource */
    void unrefAndAbandon() const {
        SkASSERT(fRefCnt > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

#ifdef SK_DEBUG
    void validate() const {
        SkASSERT(fRefCnt > 0);
    }
#endif

#ifdef SK_TRACE_VK_RESOURCES
    /** Output a human-readable dump of this resource's information
     */
    virtual void dumpInfo() const = 0;
#endif

private:
#ifdef SK_TRACE_VK_RESOURCES
    static Trace* GetTrace() {
        static Trace kTrace;
        return &kTrace;
    }
#endif

    /** Must be implemented by any subclasses.
     *  Deletes any Vk data associated with this resource
     */
    virtual void freeGPUData(const GrVkGpu* gpu) const = 0;

    /**
     * Called from unrefAndAbandon. Resources should do any necessary cleanup without freeing
     * underlying Vk objects. This must be overridden by subclasses that themselves store
     * GrVkResources, since those resources will need to be unrefed.
     */
    virtual void abandonGPUData() const {}

    /**
     *  Called when the ref count goes to 0. Will free Vk resources.
     */
    void internal_dispose(const GrVkGpu* gpu) const {
        this->freeGPUData(gpu);
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif
        SkASSERT(0 == fRefCnt);
        fRefCnt = 1;
        delete this;
    }

    /**
     *  Internal_dispose without freeing Vk resources. Used when we've lost context.
     */
    void internal_dispose() const {
        this->abandonGPUData();
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif
        SkASSERT(0 == fRefCnt);
        fRefCnt = 1;
        delete this;
    }

    mutable int32_t fRefCnt;
#ifdef SK_TRACE_VK_RESOURCES
    uint32_t fKey;
#endif

    typedef SkNoncopyable INHERITED;
};
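
// The sketch below (illustrative only, guarded out of compilation) shows the pattern a
// subclass typically follows and how callers share ownership. "GrVkExampleResource",
// "fHandle", and "shareExample" are hypothetical names invented for this example; real
// subclasses (buffers, images, render passes, etc.) follow the same shape.
#if 0
class GrVkExampleResource : public GrVkResource {
public:
    explicit GrVkExampleResource(uint64_t handle) : INHERITED(), fHandle(handle) {}

#ifdef SK_TRACE_VK_RESOURCES
    void dumpInfo() const override {
        SkDebugf("GrVkExampleResource (%d refs)\n", this->getRefCnt());
    }
#endif

private:
    // Reached from the final unref(gpu): destroy the underlying Vulkan object here,
    // using the device owned by 'gpu'.
    void freeGPUData(const GrVkGpu* gpu) const override { /* vkDestroy... */ }

    // Reached from the final unrefAndAbandon(): the device is already gone, so only
    // drop CPU-side bookkeeping and never touch Vulkan.
    void abandonGPUData() const override {}

    uint64_t fHandle;

    typedef GrVkResource INHERITED;
};

// Caller-side ownership sketch: each owner balances ref() with unref(gpu), or with
// unrefAndAbandon() when the context has been lost.
void shareExample(GrVkGpu* gpu) {
    GrVkExampleResource* res = new GrVkExampleResource(0);  // starts with one ref
    res->ref();                                              // second owner
    res->unref(gpu);                                         // second owner done
    res->unref(gpu);                                         // last ref: freeGPUData(), then delete
}
#endif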

// This subclass allows for recycling
class GrVkRecycledResource : public GrVkResource {
public:
    // When recycle is called and there is only one ref left on the resource, we will signal that
    // the resource can be recycled for reuse. If the subclass (or whoever is managing this
    // resource) decides not to recycle the object, it is their responsibility to call unref on
    // the object.
    void recycle(GrVkGpu* gpu) const {
        if (this->unique()) {
            this->onRecycle(gpu);
        } else {
            this->unref(gpu);
        }
    }

private:
    virtual void onRecycle(GrVkGpu* gpu) const = 0;
};
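
// Illustrative sketch (guarded out of compilation; "GrVkExamplePooledResource", "fPool",
// and "returnToPool" are hypothetical names): a recycled resource typically hands itself,
// with its one remaining ref, back to whatever pool or manager created it so the Vulkan
// object can be reused instead of destroyed.
#if 0
class GrVkExamplePooledResource : public GrVkRecycledResource {
public:
    explicit GrVkExamplePooledResource(GrVkExamplePool* pool) : fPool(pool) {}

#ifdef SK_TRACE_VK_RESOURCES
    void dumpInfo() const override { SkDebugf("GrVkExamplePooledResource\n"); }
#endif

private:
    // Reached from recycle() only when this object is uniquely held. The pool takes over
    // the last ref and may re-issue the resource later, or unref it if the pool is full.
    void onRecycle(GrVkGpu* gpu) const override { fPool->returnToPool(this); }

    void freeGPUData(const GrVkGpu* gpu) const override { /* destroy the Vulkan object */ }

    GrVkExamplePool* fPool;  // hypothetical manager that owns the free list
};
#endif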

#endif