Skia 2D Graphics Library — SkRefCnt.h
(Reference listing of this file; the member index follows the source.)
1 /*
2  * Copyright 2006 The Android Open Source Project
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #ifndef SkRefCnt_DEFINED
9 #define SkRefCnt_DEFINED
10 
11 #include "include/core/SkTypes.h"
12 
13 #include <atomic> // std::atomic, std::memory_order_*
14 #include <cstddef> // std::nullptr_t
15 #include <iosfwd> // std::basic_ostream
16 #include <memory> // TODO: unused
17 #include <type_traits> // std::enable_if, std::is_convertible
18 #include <utility> // std::forward, std::swap
19 
31 public:
34  SkRefCntBase() : fRefCnt(1) {}
35 
38  virtual ~SkRefCntBase() {
39  #ifdef SK_DEBUG
40  SkASSERTF(this->getRefCnt() == 1, "fRefCnt was %d", this->getRefCnt());
41  // illegal value, to catch us if we reuse after delete
42  fRefCnt.store(0, std::memory_order_relaxed);
43  #endif
44  }
45 
49  bool unique() const {
50  if (1 == fRefCnt.load(std::memory_order_acquire)) {
51  // The acquire barrier is only really needed if we return true. It
52  // prevents code conditioned on the result of unique() from running
53  // until previous owners are all totally done calling unref().
54  return true;
55  }
56  return false;
57  }
58 
61  void ref() const {
62  SkASSERT(this->getRefCnt() > 0);
63  // No barrier required.
64  (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
65  }
66 
71  void unref() const {
72  SkASSERT(this->getRefCnt() > 0);
73  // A release here acts in place of all releases we "should" have been doing in ref().
74  if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
75  // Like unique(), the acquire is only needed on success, to make sure
76  // code in internal_dispose() doesn't happen before the decrement.
77  this->internal_dispose();
78  }
79  }
80 
81 private:
82 
83 #ifdef SK_DEBUG
84 
85  int32_t getRefCnt() const {
86  return fRefCnt.load(std::memory_order_relaxed);
87  }
88 #endif
89 
93  virtual void internal_dispose() const {
94  #ifdef SK_DEBUG
95  SkASSERT(0 == this->getRefCnt());
96  fRefCnt.store(1, std::memory_order_relaxed);
97  #endif
98  delete this;
99  }
100 
101  // The following friends are those which override internal_dispose()
102  // and conditionally call SkRefCnt::internal_dispose().
103  friend class SkWeakRefCnt;
104 
105  mutable std::atomic<int32_t> fRefCnt;
106 
107  SkRefCntBase(SkRefCntBase&&) = delete;
108  SkRefCntBase(const SkRefCntBase&) = delete;
109  SkRefCntBase& operator=(SkRefCntBase&&) = delete;
110  SkRefCntBase& operator=(const SkRefCntBase&) = delete;
111 };
112 
113 #ifdef SK_REF_CNT_MIXIN_INCLUDE
114 // It is the responsibility of the following include to define the type SkRefCnt.
115 // This SkRefCnt should normally derive from SkRefCntBase.
116 #include SK_REF_CNT_MIXIN_INCLUDE
117 #else
118 class SK_API SkRefCnt : public SkRefCntBase {
119  // "#include SK_REF_CNT_MIXIN_INCLUDE" doesn't work with this build system.
120  #if defined(SK_BUILD_FOR_GOOGLE3)
121  public:
122  void deref() const { this->unref(); }
123  #endif
124 };
125 #endif
126 
128 
/** Call obj->ref() and return obj. obj must not be nullptr (asserted in debug). */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}
136 
/** Check if the argument is non-null, and if so, call obj->ref() and return obj. */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}
145 
/** Check if the argument is non-null, and if so, call obj->unref(). */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}
153 
155 
156 // This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
157 // There's only benefit to using this if the deriving class does not otherwise need a vtable.
158 template <typename Derived>
159 class SkNVRefCnt {
160 public:
161  SkNVRefCnt() : fRefCnt(1) {}
163  #ifdef SK_DEBUG
164  int rc = fRefCnt.load(std::memory_order_relaxed);
165  SkASSERTF(rc == 1, "NVRefCnt was %d", rc);
166  #endif
167  }
168 
169  // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
170  // - unique() needs acquire when it returns true, and no barrier if it returns false;
171  // - ref() doesn't need any barrier;
172  // - unref() needs a release barrier, and an acquire if it's going to call delete.
173 
174  bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
175  void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
176  void unref() const {
177  if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
178  // restore the 1 for our destructor's assert
179  SkDEBUGCODE(fRefCnt.store(1, std::memory_order_relaxed));
180  delete (const Derived*)this;
181  }
182  }
183  void deref() const { this->unref(); }
184 
185  // This must be used with caution. It is only valid to call this when 'threadIsolatedTestCnt'
186  // refs are known to be isolated to the current thread. That is, it is known that there are at
187  // least 'threadIsolatedTestCnt' refs for which no other thread may make a balancing unref()
188  // call. Assuming the contract is followed, if this returns false then no other thread has
189  // ownership of this. If it returns true then another thread *may* have ownership.
190  bool refCntGreaterThan(int32_t threadIsolatedTestCnt) const {
191  int cnt = fRefCnt.load(std::memory_order_acquire);
192  // If this fails then the above contract has been violated.
193  SkASSERT(cnt >= threadIsolatedTestCnt);
194  return cnt > threadIsolatedTestCnt;
195  }
196 
197 private:
198  mutable std::atomic<int32_t> fRefCnt;
199 
200  SkNVRefCnt(SkNVRefCnt&&) = delete;
201  SkNVRefCnt(const SkNVRefCnt&) = delete;
202  SkNVRefCnt& operator=(SkNVRefCnt&&) = delete;
203  SkNVRefCnt& operator=(const SkNVRefCnt&) = delete;
204 };
205 
207 
215 template <typename T> class sk_sp {
216 public:
217  using element_type = T;
218 
219  constexpr sk_sp() : fPtr(nullptr) {}
220  constexpr sk_sp(std::nullptr_t) : fPtr(nullptr) {}
221 
226  sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
227  template <typename U,
228  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
229  sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}
230 
236  sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
237  template <typename U,
238  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
239  sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}
240 
245  explicit sk_sp(T* obj) : fPtr(obj) {}
246 
250  ~sk_sp() {
251  SkSafeUnref(fPtr);
252  SkDEBUGCODE(fPtr = nullptr);
253  }
254 
255  sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }
256 
262  sk_sp<T>& operator=(const sk_sp<T>& that) {
263  if (this != &that) {
264  this->reset(SkSafeRef(that.get()));
265  }
266  return *this;
267  }
268  template <typename U,
269  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
270  sk_sp<T>& operator=(const sk_sp<U>& that) {
271  this->reset(SkSafeRef(that.get()));
272  return *this;
273  }
274 
281  this->reset(that.release());
282  return *this;
283  }
284  template <typename U,
285  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
287  this->reset(that.release());
288  return *this;
289  }
290 
291  T& operator*() const {
292  SkASSERT(this->get() != nullptr);
293  return *this->get();
294  }
295 
296  explicit operator bool() const { return this->get() != nullptr; }
297 
298  T* get() const { return fPtr; }
299  T* operator->() const { return fPtr; }
300 
305  void reset(T* ptr = nullptr) {
306  // Calling fPtr->unref() may call this->~() or this->reset(T*).
307  // http://wg21.cmeerw.net/lwg/issue998
308  // http://wg21.cmeerw.net/lwg/issue2262
309  T* oldPtr = fPtr;
310  fPtr = ptr;
311  SkSafeUnref(oldPtr);
312  }
313 
320  T* ptr = fPtr;
321  fPtr = nullptr;
322  return ptr;
323  }
324 
325  void swap(sk_sp<T>& that) /*noexcept*/ {
326  using std::swap;
327  swap(fPtr, that.fPtr);
328  }
329 
330 private:
331  T* fPtr;
332 };
333 
334 template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
335  a.swap(b);
336 }
337 
338 template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
339  return a.get() == b.get();
340 }
341 template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
342  return !a;
343 }
344 template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
345  return !b;
346 }
347 
348 template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
349  return a.get() != b.get();
350 }
351 template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
352  return static_cast<bool>(a);
353 }
354 template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
355  return static_cast<bool>(b);
356 }
357 
358 template <typename C, typename CT, typename T>
359 auto operator<<(std::basic_ostream<C, CT>& os, const sk_sp<T>& sp) -> decltype(os << sp.get()) {
360  return os << sp.get();
361 }
362 
363 template <typename T, typename... Args>
364 sk_sp<T> sk_make_sp(Args&&... args) {
365  return sk_sp<T>(new T(std::forward<Args>(args)...));
366 }
367 
368 /*
369  * Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
370  *
371  * This is different than the semantics of the constructor for sk_sp, which just wraps the ptr,
372  * effectively "adopting" it.
373  */
374 template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
375  return sk_sp<T>(SkSafeRef(obj));
376 }
377 
378 template <typename T> sk_sp<T> sk_ref_sp(const T* obj) {
379  return sk_sp<T>(const_cast<T*>(SkSafeRef(obj)));
380 }
381 
382 #endif
sk_sp::operator=
sk_sp< T > & operator=(const sk_sp< T > &that)
Shares the underlying object referenced by the argument by calling ref() on it.
Definition: SkRefCnt.h:262
SkRefCntBase::~SkRefCntBase
virtual ~SkRefCntBase()
Destruct, asserting that the reference count is 1.
Definition: SkRefCnt.h:38
sk_sp::operator=
sk_sp< T > & operator=(sk_sp< T > &&that)
Move the underlying object from the argument to the sk_sp.
Definition: SkRefCnt.h:280
SkNVRefCnt::unref
void unref() const
Definition: SkRefCnt.h:176
operator==
bool operator==(const sk_sp< T > &a, const sk_sp< U > &b)
Definition: SkRefCnt.h:338
SkRefCnt
Definition: SkRefCnt.h:118
sk_sp::sk_sp
constexpr sk_sp(std::nullptr_t)
Definition: SkRefCnt.h:220
SkNVRefCnt::~SkNVRefCnt
~SkNVRefCnt()
Definition: SkRefCnt.h:162
SkRefCntBase
Definition: SkRefCnt.h:30
SkTypes.h
sk_sp::release
T *SK_WARN_UNUSED_RESULT release()
Return the bare pointer, and set the internal object pointer to nullptr.
Definition: SkRefCnt.h:319
sk_sp::operator=
sk_sp< T > & operator=(sk_sp< U > &&that)
Definition: SkRefCnt.h:286
SkNVRefCnt
Definition: SkRefCnt.h:159
SkNVRefCnt::unique
bool unique() const
Definition: SkRefCnt.h:174
sk_ref_sp
sk_sp< T > sk_ref_sp(T *obj)
Definition: SkRefCnt.h:374
sk_sp::reset
void reset(T *ptr=nullptr)
Adopt the new bare pointer, and call unref() on any previously held object (if not null).
Definition: SkRefCnt.h:305
SkNVRefCnt::deref
void deref() const
Definition: SkRefCnt.h:183
SkRefCntBase::SkRefCntBase
SkRefCntBase()
Default construct, initializing the reference count to 1.
Definition: SkRefCnt.h:34
SkRefCntBase::ref
void ref() const
Increment the reference count.
Definition: SkRefCnt.h:61
SkSafeUnref
static void SkSafeUnref(T *obj)
Check if the argument is non-null, and if so, call obj->unref()
Definition: SkRefCnt.h:148
sk_sp::swap
void swap(sk_sp< T > &that)
Definition: SkRefCnt.h:325
SkRef
static T * SkRef(T *obj)
Call obj->ref() and return obj.
Definition: SkRefCnt.h:131
operator!=
bool operator!=(const sk_sp< T > &a, const sk_sp< U > &b)
Definition: SkRefCnt.h:348
sk_make_sp
sk_sp< T > sk_make_sp(Args &&... args)
Definition: SkRefCnt.h:364
sk_sp
Shared pointer class to wrap classes that support a ref()/unref() interface.
Definition: SkRefCnt.h:215
SkSafeRef
static T * SkSafeRef(T *obj)
Check if the argument is non-null, and if so, call obj->ref() and return obj.
Definition: SkRefCnt.h:139
sk_sp::sk_sp
sk_sp(T *obj)
Adopt the bare pointer into the newly created sk_sp.
Definition: SkRefCnt.h:245
SkRefCntBase::unique
bool unique() const
May return true if the caller is the only owner.
Definition: SkRefCnt.h:49
SkNVRefCnt::SkNVRefCnt
SkNVRefCnt()
Definition: SkRefCnt.h:161
SkDEBUGCODE
#define SkDEBUGCODE(...)
Definition: SkTypes.h:467
SkASSERTF
#define SkASSERTF(cond, fmt,...)
Definition: SkTypes.h:461
SK_API
#define SK_API
Definition: SkTypes.h:181
SkASSERT
#define SkASSERT(cond)
Definition: SkTypes.h:460
SkRefCntBase::unref
void unref() const
Decrement the reference count.
Definition: SkRefCnt.h:71
sk_sp::sk_sp
sk_sp(sk_sp< U > &&that)
Definition: SkRefCnt.h:239
sk_sp::~sk_sp
~sk_sp()
Calls unref() on the underlying object pointer.
Definition: SkRefCnt.h:250
sk_sp::sk_sp
sk_sp(sk_sp< T > &&that)
Move the underlying object from the argument to the newly created sk_sp.
Definition: SkRefCnt.h:236
sk_sp::get
T * get() const
Definition: SkRefCnt.h:298
sk_sp::sk_sp
constexpr sk_sp()
Definition: SkRefCnt.h:219
swap
void swap(sk_sp< T > &a, sk_sp< T > &b)
Definition: SkRefCnt.h:334
sk_sp::operator=
sk_sp< T > & operator=(std::nullptr_t)
Definition: SkRefCnt.h:255
sk_sp::sk_sp
sk_sp(const sk_sp< T > &that)
Shares the underlying object by calling ref(), so that both the argument and the newly created sk_sp ...
Definition: SkRefCnt.h:226
sk_sp< SkMarkerStack >::element_type
SkMarkerStack element_type
Definition: SkRefCnt.h:217
operator<<
auto operator<<(std::basic_ostream< C, CT > &os, const sk_sp< T > &sp) -> decltype(os<< sp.get())
Definition: SkRefCnt.h:359
sk_sp::operator->
T * operator->() const
Definition: SkRefCnt.h:299
sk_sp::operator*
T & operator*() const
Definition: SkRefCnt.h:291
SK_WARN_UNUSED_RESULT
#define SK_WARN_UNUSED_RESULT
Definition: SkTypes.h:60
SkNVRefCnt::refCntGreaterThan
bool refCntGreaterThan(int32_t threadIsolatedTestCnt) const
Definition: SkRefCnt.h:190
sk_sp::operator=
sk_sp< T > & operator=(const sk_sp< U > &that)
Definition: SkRefCnt.h:270
sk_sp::sk_sp
sk_sp(const sk_sp< U > &that)
Definition: SkRefCnt.h:229
SkNVRefCnt::ref
void ref() const
Definition: SkRefCnt.h:175