blob: 12319969195796f76e71840788b1f92cc3bfb0ad [file] [log] [blame]
/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#ifndef GrVkResource_DEFINED
9#define GrVkResource_DEFINED
10
Greg Daniel54bfb182018-11-20 17:12:36 -050011#include "GrVkVulkan.h"
12
Greg Daniel164a9f02016-02-22 09:56:40 -050013#include "SkRandom.h"
egdaniel00db3fd2016-07-29 08:55:53 -070014#include "SkTHash.h"
Mike Klein820e79b2018-12-04 09:31:31 -050015#include <atomic>
Greg Daniel164a9f02016-02-22 09:56:40 -050016
17class GrVkGpu;
18
19// uncomment to enable tracing of resource refs
jvanverth7ec92412016-07-06 09:24:57 -070020#ifdef SK_DEBUG
21#define SK_TRACE_VK_RESOURCES
22#endif
Greg Daniel164a9f02016-02-22 09:56:40 -050023
/** \class GrVkResource

    GrVkResource is the base class for Vulkan resources that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.

    This is nearly identical to SkRefCntBase. The exceptions are that unref()
    takes a GrVkGpu, and any derived classes must implement freeGPUData() and
    possibly abandonGPUData().
*/
38
class GrVkResource : SkNoncopyable {
public:
    // Simple refCount tracing, to ensure that everything ref'ed is unref'ed.
#ifdef SK_TRACE_VK_RESOURCES
    // Hash functor for the tracing set below: every resource carries a unique
    // fKey (assigned from fKeyCounter in the constructor), so hashing is just
    // returning that key.
    struct Hash {
        uint32_t operator()(const GrVkResource* const& r) const {
            SkASSERT(r);
            return r->fKey;
        }
    };

    // Debug-only registry of all live GrVkResources. When the Trace singleton
    // is destroyed (at process teardown), any resource still registered was
    // leaked: dump each one, then assert the set is empty.
    class Trace {
    public:
        ~Trace() {
            fHashSet.foreach([](const GrVkResource* r) {
                r->dumpInfo();
            });
            SkASSERT(0 == fHashSet.count());
        }
        void add(const GrVkResource* r) { fHashSet.add(r); }
        void remove(const GrVkResource* r) { fHashSet.remove(r); }

    private:
        SkTHashSet<const GrVkResource*, GrVkResource::Hash> fHashSet;
    };

    // Monotonically increasing counter used to hand out unique fKey values.
    // NOTE(review): declared here; the definition must live in a .cpp file.
    static std::atomic<uint32_t> fKeyCounter;
#endif

    /** Default construct, initializing the reference count to 1.
    */
    GrVkResource() : fRefCnt(1) {
#ifdef SK_TRACE_VK_RESOURCES
        // Relaxed is sufficient: we only need uniqueness, not ordering.
        fKey = fKeyCounter.fetch_add(+1, std::memory_order_relaxed);
        GetTrace()->add(this);
#endif
    }

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~GrVkResource() {
#ifdef SK_DEBUG
        auto count = this->getRefCnt();
        SkASSERTF(count == 1, "fRefCnt was %d", count);
        fRefCnt.store(0);    // illegal value, to catch us if we reuse after delete
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const { return fRefCnt.load(); }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owner's actions are complete.
     */
    bool unique() const {
        // The acquire barrier is only really needed if we return true.  It
        // prevents code conditioned on the result of unique() from running
        // until previous owners are all totally done calling unref().
        return 1 == fRefCnt.load(std::memory_order_acquire);
    }

    /** Increment the reference count.
        Must be balanced by a call to unref() or unrefAndFreeResources().
    */
    void ref() const {
        SkASSERT(this->getRefCnt() > 0);
        (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);  // No barrier required.
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
        Any GPU data associated with this resource will be freed before it's deleted.
    */
    void unref(const GrVkGpu* gpu) const {
        SkASSERT(this->getRefCnt() > 0);
        SkASSERT(gpu);
        // A release here acts in place of all releases we "should" have been doing in ref().
        // fetch_add returns the PREVIOUS value, so 1 means we just dropped the last ref.
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose(gpu);
        }
    }

    /** Unref without freeing GPU data. Used only when we're abandoning the resource */
    void unrefAndAbandon() const {
        SkASSERT(this->getRefCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

#ifdef SK_DEBUG
    // Sanity check: a live resource must always hold at least one ref.
    void validate() const {
        SkASSERT(this->getRefCnt() > 0);
    }
#endif

#ifdef SK_TRACE_VK_RESOURCES
    /** Output a human-readable dump of this resource's information
     */
    virtual void dumpInfo() const = 0;
#endif

private:
#ifdef SK_TRACE_VK_RESOURCES
    // Meyers-singleton accessor for the leak-tracking set; function-local
    // static avoids static-initialization-order issues with resources
    // created during other globals' construction.
    static Trace* GetTrace() {
        static Trace kTrace;
        return &kTrace;
    }
#endif

    /** Must be implemented by any subclasses.
     *  Deletes any Vk data associated with this resource
     */
    virtual void freeGPUData(const GrVkGpu* gpu) const = 0;

    /**
     * Called from unrefAndAbandon. Resources should do any necessary cleanup without freeing
     * underlying Vk objects. This must be overridden by subclasses that themselves store
     * GrVkResources since those resource will need to be unrefed.
     */
    virtual void abandonGPUData() const {}

    /**
     * Called when the ref count goes to 0. Will free Vk resources.
     */
    void internal_dispose(const GrVkGpu* gpu) const {
        this->freeGPUData(gpu);
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif

#ifdef SK_DEBUG
        SkASSERT(0 == this->getRefCnt());
        // Restore the count to 1 so the assert in ~GrVkResource passes when
        // 'delete this' runs the destructor below.
        fRefCnt.store(1);
#endif
        delete this;
    }

    /**
     * Internal_dispose without freeing Vk resources. Used when we've lost context.
     */
    void internal_dispose() const {
        this->abandonGPUData();
#ifdef SK_TRACE_VK_RESOURCES
        GetTrace()->remove(this);
#endif

#ifdef SK_DEBUG
        SkASSERT(0 == this->getRefCnt());
        // As above: reset to 1 so the destructor's refcount assert holds.
        fRefCnt.store(1);
#endif
        delete this;
    }

    // mutable: ref()/unref() are const so callers can share const resources.
    mutable std::atomic<int32_t> fRefCnt;
#ifdef SK_TRACE_VK_RESOURCES
    uint32_t fKey;  // unique id used by Trace/Hash for leak tracking
#endif

    typedef SkNoncopyable INHERITED;
};
208
egdanielc1be9bc2016-07-20 08:33:00 -0700209// This subclass allows for recycling
210class GrVkRecycledResource : public GrVkResource {
211public:
212 // When recycle is called and there is only one ref left on the resource, we will signal that
egdaniela95220d2016-07-21 11:50:37 -0700213 // the resource can be recycled for reuse. If the sublass (or whoever is managing this resource)
214 // decides not to recycle the objects, it is their responsibility to call unref on the object.
egdanielc1be9bc2016-07-20 08:33:00 -0700215 void recycle(GrVkGpu* gpu) const {
216 if (this->unique()) {
217 this->onRecycle(gpu);
egdaniela95220d2016-07-21 11:50:37 -0700218 } else {
219 this->unref(gpu);
egdanielc1be9bc2016-07-20 08:33:00 -0700220 }
egdanielc1be9bc2016-07-20 08:33:00 -0700221 }
222
223private:
224 virtual void onRecycle(GrVkGpu* gpu) const = 0;
225};
Greg Daniel164a9f02016-02-22 09:56:40 -0500226
227#endif