/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkResource_DEFINED
#define GrVkResource_DEFINED

#include "GrVkVulkan.h"

#include "SkAtomics.h"
#include "SkRandom.h"
#include "SkTHash.h"

class GrVkGpu;

// uncomment to enable tracing of resource refs
#ifdef SK_DEBUG
#define SK_TRACE_VK_RESOURCES
#endif

/** \class GrVkResource

    GrVkResource is the base class for Vulkan resources that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.

    This is nearly identical to SkRefCntBase. The exceptions are that unref()
    takes a GrVkGpu, and any derived classes must implement freeGPUData() and
    possibly abandonGPUData().
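
    A hypothetical usage sketch (the names "fResource" and "gpu" are placeholders, not part
    of this header; unref() takes the GrVkGpu because freeing the data may touch the device):

        fResource->ref();          // another owner takes a shared reference
        ...
        fResource->unref(gpu);     // each owner releases its reference; the last unref()
                                   // frees the underlying Vulkan data and deletes the object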
*/

class GrVkResource : SkNoncopyable {
public:
    // Simple refCount tracing, to ensure that everything ref'ed is unref'ed.
#ifdef SK_TRACE_VK_RESOURCES
    struct Hash {
        uint32_t operator()(const GrVkResource* const& r) const {
            SkASSERT(r);
            return r->fKey;
        }
    };

    class Trace {
    public:
        ~Trace() {
            fHashSet.foreach([](const GrVkResource* r) {
                r->dumpInfo();
            });
            SkASSERT(0 == fHashSet.count());
        }
        void add(const GrVkResource* r) { fHashSet.add(r); }
        void remove(const GrVkResource* r) { fHashSet.remove(r); }

    private:
        SkTHashSet<const GrVkResource*, GrVkResource::Hash> fHashSet;
    };

    static uint32_t fKeyCounter;
#endif

    /** Default construct, initializing the reference count to 1.
    */
    GrVkResource() : fRefCnt(1) {
#ifdef SK_TRACE_VK_RESOURCES
        fKey = sk_atomic_fetch_add(&fKeyCounter, 1u, sk_memory_order_relaxed);
        GetTrace()->add(this);
#endif
    }

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~GrVkResource() {
#ifdef SK_DEBUG
        SkASSERTF(fRefCnt == 1, "fRefCnt was %d", fRefCnt);
        fRefCnt = 0;    // illegal value, to catch us if we reuse after delete
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const { return fRefCnt; }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owner's actions are complete.
     */
    bool unique() const {
        if (1 == sk_atomic_load(&fRefCnt, sk_memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true. It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }

    /** Increment the reference count.
        Must be balanced by a call to unref() or unrefAndAbandon().
    */
    void ref() const {
        SkASSERT(fRefCnt > 0);
        (void)sk_atomic_fetch_add(&fRefCnt, +1, sk_memory_order_relaxed);  // No barrier required.
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
        Any GPU data associated with this resource will be freed before it's deleted.
    */
    void unref(const GrVkGpu* gpu) const {
        SkASSERT(fRefCnt > 0);
        SkASSERT(gpu);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose(gpu);
        }
    }

    /** Unref without freeing GPU data. Used only when we're abandoning the resource */
    void unrefAndAbandon() const {
        SkASSERT(fRefCnt > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

#ifdef SK_DEBUG
    void validate() const {
        SkASSERT(fRefCnt > 0);
    }
#endif

#ifdef SK_TRACE_VK_RESOURCES
    /** Output a human-readable dump of this resource's information
    */
    virtual void dumpInfo() const = 0;
#endif

private:
#ifdef SK_TRACE_VK_RESOURCES
    static Trace* GetTrace() {
        static Trace kTrace;
        return &kTrace;
    }
#endif

    /** Must be implemented by any subclasses.
     *  Deletes any Vk data associated with this resource
     */
    virtual void freeGPUData(const GrVkGpu* gpu) const = 0;

    /**
     * Called from unrefAndAbandon. Resources should do any necessary cleanup without freeing
     * underlying Vk objects. This must be overridden by subclasses that themselves store
     * GrVkResources since those resources will need to be unrefed.
     */
    virtual void abandonGPUData() const {}

    /**
     *  Called when the ref count goes to 0. Will free Vk resources.
     */
    void internal_dispose(const GrVkGpu* gpu) const {
        this->freeGPUData(gpu);
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif
        SkASSERT(0 == fRefCnt);
        fRefCnt = 1;
        delete this;
    }

    /**
     *  Internal_dispose without freeing Vk resources. Used when we've lost context.
     */
    void internal_dispose() const {
        this->abandonGPUData();
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif
        SkASSERT(0 == fRefCnt);
        fRefCnt = 1;
        delete this;
    }

    mutable int32_t fRefCnt;
#ifdef SK_TRACE_VK_RESOURCES
    uint32_t fKey;
#endif

    typedef SkNoncopyable INHERITED;
};

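// A hypothetical subclass sketch (illustration only, guarded out of the build): it shows the
// overrides a concrete GrVkResource is expected to provide. "GrVkExampleResource" and
// "fHandle" are made-up names; a real subclass would destroy its Vulkan objects through the
// GrVkGpu's interface in freeGPUData() and merely forget them in abandonGPUData().
#if 0
class GrVkExampleResource : public GrVkResource {
public:
    GrVkExampleResource(uint64_t handle) : fHandle(handle) {}

#ifdef SK_TRACE_VK_RESOURCES
    void dumpInfo() const override {
        SkDebugf("GrVkExampleResource: handle %llu, refCnt %d\n",
                 (unsigned long long)fHandle, this->getRefCnt());
    }
#endif

private:
    // Destroys the Vulkan object owned by this resource; needs the GrVkGpu because
    // destruction goes through the device (e.g. a vkDestroy* call).
    void freeGPUData(const GrVkGpu* gpu) const override {
        // e.g. destroy fHandle via the gpu's Vulkan interface here
    }

    // Device was abandoned: drop the handle without destroying it.
    void abandonGPUData() const override {
        fHandle = 0;
    }

    mutable uint64_t fHandle;
};
#endif
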
// This subclass allows for recycling
class GrVkRecycledResource : public GrVkResource {
public:
    // When recycle is called and there is only one ref left on the resource, we will signal that
    // the resource can be recycled for reuse. If the subclass (or whoever is managing this
    // resource) decides not to recycle the object, it is their responsibility to call unref on
    // the object.
    void recycle(GrVkGpu* gpu) const {
        if (this->unique()) {
            this->onRecycle(gpu);
        } else {
            this->unref(gpu);
        }
    }

private:
    virtual void onRecycle(GrVkGpu* gpu) const = 0;
};

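// A hypothetical recycled-resource sketch (illustration only, guarded out of the build):
// when recycle() sees that the caller holds the last reference, onRecycle() runs instead of
// unref(), so the object can be returned to a pool and reused rather than destroyed.
// "GrVkExamplePooledResource" and "fFreeList" are made-up names, and the SkTArray free list
// is only an assumption about how a manager might keep recycled objects.
#if 0
class GrVkExamplePooledResource : public GrVkRecycledResource {
public:
    GrVkExamplePooledResource(SkTArray<const GrVkExamplePooledResource*>* freeList)
            : fFreeList(freeList) {}

#ifdef SK_TRACE_VK_RESOURCES
    void dumpInfo() const override { SkDebugf("GrVkExamplePooledResource\n"); }
#endif

private:
    void onRecycle(GrVkGpu* gpu) const override {
        // The object still holds its single remaining ref; the free list now owns that ref
        // and can either hand the object out again or call unref(gpu) later to free it.
        fFreeList->push_back(this);
    }

    void freeGPUData(const GrVkGpu* gpu) const override {}
    void abandonGPUData() const override {}

    SkTArray<const GrVkExamplePooledResource*>* fFreeList;
};
#endif
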
#endif