/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkRefCnt_DEFINED
#define SkRefCnt_DEFINED

#include "SkTypes.h"

#include <atomic>
#include <cstddef>
#include <functional>
#include <ostream>
#include <type_traits>
#include <utility>

// These two headers will be removed in a later CL:
#include <memory>
#include <array>

/** \class SkRefCntBase

    SkRefCntBase is the base class for objects that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.
*/
class SK_API SkRefCntBase {
public:
    /** Default construct, initializing the reference count to 1.
    */
    SkRefCntBase() : fRefCnt(1) {}
    SkRefCntBase(SkRefCntBase&&) = delete;
    SkRefCntBase(const SkRefCntBase&) = delete;
    SkRefCntBase& operator=(SkRefCntBase&&) = delete;
    SkRefCntBase& operator=(const SkRefCntBase&) = delete;

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~SkRefCntBase() {
#ifdef SK_DEBUG
        SkASSERTF(getRefCnt() == 1, "fRefCnt was %d", getRefCnt());
        // illegal value, to catch us if we reuse after delete
        fRefCnt.store(0, std::memory_order_relaxed);
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const {
        return fRefCnt.load(std::memory_order_relaxed);
    }

    void validate() const {
        SkASSERT(getRefCnt() > 0);
    }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owners' actions are complete.
     */
    bool unique() const {
        if (1 == fRefCnt.load(std::memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true.  It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }

    /** Increment the reference count. Must be balanced by a call to unref().
    */
    void ref() const {
        SkASSERT(getRefCnt() > 0);
        // No barrier required.
        (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
    */
    void unref() const {
        SkASSERT(getRefCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

protected:
    /**
     *  Allow subclasses to call this if they've overridden internal_dispose
     *  so they can reset fRefCnt before the destructor is called or if they
     *  choose not to call the destructor (e.g. using a free list).
     */
    void internal_dispose_restore_refcnt_to_1() const {
        SkASSERT(0 == getRefCnt());
        fRefCnt.store(1, std::memory_order_relaxed);
    }

private:
    /**
     *  Called when the ref count goes to 0.
     */
    virtual void internal_dispose() const {
        this->internal_dispose_restore_refcnt_to_1();
        delete this;
    }

    // The following friends are those which override internal_dispose()
    // and conditionally call SkRefCnt::internal_dispose().
    friend class SkWeakRefCnt;

    mutable std::atomic<int32_t> fRefCnt;
};
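
// A minimal usage sketch (MyShared is hypothetical, not part of Skia). New
// objects start with a reference count of 1, every ref() must be balanced by
// an unref(), and the final unref() invokes the virtual destructor:
//
//   class MyShared : public SkRefCntBase {
//   public:
//       int fValue = 0;
//   };
//
//   MyShared* obj = new MyShared;   // refcnt == 1, caller is the owner
//   obj->ref();                     // refcnt == 2, a second owner
//   obj->unref();                   // refcnt == 1
//   obj->unref();                   // refcnt == 0, internal_dispose() deletes it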

#ifdef SK_REF_CNT_MIXIN_INCLUDE
// It is the responsibility of the following include to define the type SkRefCnt.
// This SkRefCnt should normally derive from SkRefCntBase.
#include SK_REF_CNT_MIXIN_INCLUDE
#else
class SK_API SkRefCnt : public SkRefCntBase {
    // "#include SK_REF_CNT_MIXIN_INCLUDE" doesn't work with this build system.
    #if defined(SK_BUILD_FOR_GOOGLE3)
    public:
        void deref() const { this->unref(); }
    #endif
};
#endif
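
// A hypothetical sketch of the mixin hook above: a build that defines
// SK_REF_CNT_MIXIN_INCLUDE points it at a header declaring SkRefCnt, normally
// deriving from SkRefCntBase, much like the SK_BUILD_FOR_GOOGLE3 branch:
//
//   // my_refcnt_mixin.h (hypothetical header named by SK_REF_CNT_MIXIN_INCLUDE):
//   class SK_API SkRefCnt : public SkRefCntBase {
//   public:
//       void deref() const { this->unref(); }  // embedder-specific hooks go here
//   };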

///////////////////////////////////////////////////////////////////////////////

/** Helper macro to safely assign one SkRefCnt[TS]* to another, checking for
    null on each side of the assignment, and ensuring that ref() is called
    before unref(), in case the two pointers point to the same object.
 */

#if defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
// This version heuristically detects data races, since those otherwise result
// in redundant reference count decrements, which are exceedingly
// difficult to debug.

#define SkRefCnt_SafeAssign(dst, src) \
    do { \
        typedef typename std::remove_reference<decltype(dst)>::type \
                SkRefCntPtrT; \
        SkRefCntPtrT old_dst = *const_cast<SkRefCntPtrT volatile *>(&dst); \
        if (src) src->ref(); \
        if (old_dst) old_dst->unref(); \
        if (old_dst != *const_cast<SkRefCntPtrT volatile *>(&dst)) { \
            SkDebugf("Detected racing Skia calls at %s:%d\n", \
                    __FILE__, __LINE__); \
        } \
        dst = src; \
    } while (0)

#else /* !SK_BUILD_FOR_ANDROID_FRAMEWORK */

#define SkRefCnt_SafeAssign(dst, src) \
    do { \
        if (src) src->ref(); \
        if (dst) dst->unref(); \
        dst = src; \
    } while (0)

#endif
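
// A usage sketch for SkRefCnt_SafeAssign (Cache, fCached, and newObj are
// hypothetical). Because src is ref()'ed before dst is unref()'ed, the
// assignment is safe even when both pointers refer to the same object:
//
//   void Cache::set(SkRefCnt* newObj) {
//       SkRefCnt_SafeAssign(fCached, newObj);   // ok even if fCached == newObj
//   }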


/** Call obj->ref() and return obj. The obj must not be nullptr.
 */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
 */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->unref()
 */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}

template<typename T> static inline void SkSafeSetNull(T*& obj) {
    if (obj) {
        obj->unref();
        obj = nullptr;
    }
}
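
// A sketch contrasting the helpers above (fObj and maybeNull are hypothetical):
//
//   T* a = SkRef(that.fObj);       // asserts non-null, calls ref(), returns it
//   T* b = SkSafeRef(maybeNull);   // calls ref() only if non-null
//   SkSafeUnref(b);                // calls unref() only if non-null
//   SkSafeSetNull(fObj);           // unrefs fObj (if set) and nulls the field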

///////////////////////////////////////////////////////////////////////////////

// This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
// There's only benefit to using this if the deriving class does not otherwise need a vtable.
template <typename Derived>
class SkNVRefCnt {
public:
    SkNVRefCnt() : fRefCnt(1) {}
    ~SkNVRefCnt() { SkASSERTF(1 == getRefCnt(), "NVRefCnt was %d", getRefCnt()); }
    SkNVRefCnt(SkNVRefCnt&&) = delete;
    SkNVRefCnt(const SkNVRefCnt&) = delete;
    SkNVRefCnt& operator=(SkNVRefCnt&&) = delete;
    SkNVRefCnt& operator=(const SkNVRefCnt&) = delete;

    // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
    //   - unique() needs acquire when it returns true, and no barrier if it returns false;
    //   - ref() doesn't need any barrier;
    //   - unref() needs a release barrier, and an acquire if it's going to call delete.

    bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
    void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
    void unref() const {
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // restore the 1 for our destructor's assert
            SkDEBUGCODE(fRefCnt.store(1, std::memory_order_relaxed));
            delete (const Derived*)this;
        }
    }
    void deref() const { this->unref(); }

private:
    mutable std::atomic<int32_t> fRefCnt;
    int32_t getRefCnt() const {
        return fRefCnt.load(std::memory_order_relaxed);
    }
};
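
// SkNVRefCnt is a CRTP base: the deriving class passes itself as Derived so
// that unref() can "delete (const Derived*)this" without a virtual destructor.
// A hypothetical sketch:
//
//   class MyBlob : public SkNVRefCnt<MyBlob> {
//       // 4-byte ref count, no vtable; MyBlob must be the most-derived type,
//       // since unref() deletes through MyBlob* rather than a virtual dtor.
//   };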

///////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Shared pointer class to wrap classes that support a ref()/unref() interface.
 *
 *  This can be used for classes inheriting from SkRefCnt, but it also works for other
 *  classes that match the interface, but have different internal choices: e.g. the hosted class
 *  may have its ref/unref be thread-safe, but that is not assumed/imposed by sk_sp.
 */
template <typename T> class sk_sp {
public:
    using element_type = T;

    constexpr sk_sp() : fPtr(nullptr) {}
    constexpr sk_sp(std::nullptr_t) : fPtr(nullptr) {}

    /**
     *  Shares the underlying object by calling ref(), so that both the argument and the newly
     *  created sk_sp have a reference to it.
     */
    sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}

    /**
     *  Move the underlying object from the argument to the newly created sk_sp. Afterwards only
     *  the new sk_sp will have a reference to the object, and the argument will point to null.
     *  No call to ref() or unref() will be made.
     */
    sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}

    /**
     *  Adopt the bare pointer into the newly created sk_sp.
     *  No call to ref() or unref() will be made.
     */
    explicit sk_sp(T* obj) : fPtr(obj) {}

    /**
     *  Calls unref() on the underlying object pointer.
     */
    ~sk_sp() {
        SkSafeUnref(fPtr);
        SkDEBUGCODE(fPtr = nullptr);
    }

    sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }

    /**
     *  Shares the underlying object referenced by the argument by calling ref() on it. If this
     *  sk_sp previously had a reference to an object (i.e. not null) it will call unref() on that
     *  object.
     */
    sk_sp<T>& operator=(const sk_sp<T>& that) {
        if (this != &that) {
            this->reset(SkSafeRef(that.get()));
        }
        return *this;
    }
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp<T>& operator=(const sk_sp<U>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }

    /**
     *  Move the underlying object from the argument to the sk_sp. If the sk_sp previously held
     *  a reference to another object, unref() will be called on that object. No call to ref()
     *  will be made.
     */
    sk_sp<T>& operator=(sk_sp<T>&& that) {
        this->reset(that.release());
        return *this;
    }
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp<T>& operator=(sk_sp<U>&& that) {
        this->reset(that.release());
        return *this;
    }

    T& operator*() const {
        SkASSERT(this->get() != nullptr);
        return *this->get();
    }

    explicit operator bool() const { return this->get() != nullptr; }

    T* get() const { return fPtr; }
    T* operator->() const { return fPtr; }

    /**
     *  Adopt the new bare pointer, and call unref() on any previously held object (if not null).
     *  No call to ref() will be made.
     */
    void reset(T* ptr = nullptr) {
        // Calling fPtr->unref() may call this->~() or this->reset(T*).
        // http://wg21.cmeerw.net/lwg/issue998
        // http://wg21.cmeerw.net/lwg/issue2262
        T* oldPtr = fPtr;
        fPtr = ptr;
        SkSafeUnref(oldPtr);
    }

    /**
     *  Return the bare pointer, and set the internal object pointer to nullptr.
     *  The caller must assume ownership of the object, and manage its reference count directly.
     *  No call to unref() will be made.
     */
    T* SK_WARN_UNUSED_RESULT release() {
        T* ptr = fPtr;
        fPtr = nullptr;
        return ptr;
    }

    void swap(sk_sp<T>& that) /*noexcept*/ {
        using std::swap;
        swap(fPtr, that.fPtr);
    }

private:
    T* fPtr;
};
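
// A usage sketch (MyShared is a hypothetical class with ref()/unref()):
//
//   sk_sp<MyShared> a(new MyShared);    // adopts: refcnt stays at 1
//   sk_sp<MyShared> b = a;              // copy calls ref(): refcnt == 2
//   sk_sp<MyShared> c = std::move(a);   // move: no ref()/unref(), a is now null
//   b.reset();                          // unref(): refcnt == 1
//   // when c goes out of scope it unrefs: refcnt == 0, object deleted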

template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
    a.swap(b);
}

template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() == b.get();
}
template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return !a;
}
template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return !b;
}

template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() != b.get();
}
template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return static_cast<bool>(a);
}
template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return static_cast<bool>(b);
}

template <typename T, typename U> inline bool operator<(const sk_sp<T>& a, const sk_sp<U>& b) {
    // Provide defined total order on sk_sp.
    // http://wg21.cmeerw.net/lwg/issue1297
    // http://wg21.cmeerw.net/lwg/issue1401 .
    return std::less<typename std::common_type<T*, U*>::type>()(a.get(), b.get());
}
template <typename T> inline bool operator<(const sk_sp<T>& a, std::nullptr_t) {
    return std::less<T*>()(a.get(), nullptr);
}
template <typename T> inline bool operator<(std::nullptr_t, const sk_sp<T>& b) {
    return std::less<T*>()(nullptr, b.get());
}

template <typename T, typename U> inline bool operator<=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(b < a);
}
template <typename T> inline bool operator<=(const sk_sp<T>& a, std::nullptr_t) {
    return !(nullptr < a);
}
template <typename T> inline bool operator<=(std::nullptr_t, const sk_sp<T>& b) {
    return !(b < nullptr);
}

template <typename T, typename U> inline bool operator>(const sk_sp<T>& a, const sk_sp<U>& b) {
    return b < a;
}
template <typename T> inline bool operator>(const sk_sp<T>& a, std::nullptr_t) {
    return nullptr < a;
}
template <typename T> inline bool operator>(std::nullptr_t, const sk_sp<T>& b) {
    return b < nullptr;
}

template <typename T, typename U> inline bool operator>=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(a < b);
}
template <typename T> inline bool operator>=(const sk_sp<T>& a, std::nullptr_t) {
    return !(a < nullptr);
}
template <typename T> inline bool operator>=(std::nullptr_t, const sk_sp<T>& b) {
    return !(nullptr < b);
}

template <typename C, typename CT, typename T>
auto operator<<(std::basic_ostream<C, CT>& os, const sk_sp<T>& sp) -> decltype(os << sp.get()) {
    return os << sp.get();
}

template <typename T, typename... Args>
sk_sp<T> sk_make_sp(Args&&... args) {
    return sk_sp<T>(new T(std::forward<Args>(args)...));
}
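
// sk_make_sp forwards its arguments to T's constructor and adopts the result,
// so the returned sk_sp holds the only reference, e.g. (hypothetically):
//
//   sk_sp<MyShared> p = sk_make_sp<MyShared>(/*ctor args*/);   // refcnt == 1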

/*
 *  Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
 *
 *  This is different from the semantics of the constructor for sk_sp, which just wraps the ptr,
 *  effectively "adopting" it.
 */
template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
    return sk_sp<T>(SkSafeRef(obj));
}

template <typename T> sk_sp<T> sk_ref_sp(const T* obj) {
    return sk_sp<T>(const_cast<T*>(SkSafeRef(obj)));
}
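
// A sketch of adopting versus sharing (obj is a hypothetical MyShared* whose
// reference the caller owns):
//
//   sk_sp<MyShared> a(obj);               // adopts the caller's reference
//   // ...or, to share without taking over the caller's reference:
//   sk_sp<MyShared> b = sk_ref_sp(obj);   // calls ref(); obj keeps its own ref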

#endif