Skia
2D Graphics Library
SkRefCnt.h
Go to the documentation of this file.
1 /*
2  * Copyright 2006 The Android Open Source Project
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #ifndef SkRefCnt_DEFINED
9 #define SkRefCnt_DEFINED
10 
11 #include "include/core/SkTypes.h"
12 #include "include/private/base/SkDebug.h"
13 
14 #include <atomic>
15 #include <cstddef>
16 #include <cstdint>
17 #include <iosfwd>
18 #include <type_traits>
19 #include <utility>
20 
31 class SK_API SkRefCntBase {
32 public:
35  SkRefCntBase() : fRefCnt(1) {}
36 
39  virtual ~SkRefCntBase() {
40  #ifdef SK_DEBUG
41  SkASSERTF(this->getRefCnt() == 1, "fRefCnt was %d", this->getRefCnt());
42  // illegal value, to catch us if we reuse after delete
43  fRefCnt.store(0, std::memory_order_relaxed);
44  #endif
45  }
46 
50  bool unique() const {
51  if (1 == fRefCnt.load(std::memory_order_acquire)) {
52  // The acquire barrier is only really needed if we return true. It
53  // prevents code conditioned on the result of unique() from running
54  // until previous owners are all totally done calling unref().
55  return true;
56  }
57  return false;
58  }
59 
62  void ref() const {
63  SkASSERT(this->getRefCnt() > 0);
64  // No barrier required.
65  (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
66  }
67 
72  void unref() const {
73  SkASSERT(this->getRefCnt() > 0);
74  // A release here acts in place of all releases we "should" have been doing in ref().
75  if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
76  // Like unique(), the acquire is only needed on success, to make sure
77  // code in internal_dispose() doesn't happen before the decrement.
78  this->internal_dispose();
79  }
80  }
81 
82 private:
83 
84 #ifdef SK_DEBUG
86  int32_t getRefCnt() const {
87  return fRefCnt.load(std::memory_order_relaxed);
88  }
89 #endif
90 
94  virtual void internal_dispose() const {
95  #ifdef SK_DEBUG
96  SkASSERT(0 == this->getRefCnt());
97  fRefCnt.store(1, std::memory_order_relaxed);
98  #endif
99  delete this;
100  }
101 
102  // The following friends are those which override internal_dispose()
103  // and conditionally call SkRefCnt::internal_dispose().
104  friend class SkWeakRefCnt;
105 
106  mutable std::atomic<int32_t> fRefCnt;
107 
108  SkRefCntBase(SkRefCntBase&&) = delete;
109  SkRefCntBase(const SkRefCntBase&) = delete;
110  SkRefCntBase& operator=(SkRefCntBase&&) = delete;
111  SkRefCntBase& operator=(const SkRefCntBase&) = delete;
112 };
113 
114 #ifdef SK_REF_CNT_MIXIN_INCLUDE
115 // It is the responsibility of the following include to define the type SkRefCnt.
116 // This SkRefCnt should normally derive from SkRefCntBase.
117 #include SK_REF_CNT_MIXIN_INCLUDE
118 #else
119 class SK_API SkRefCnt : public SkRefCntBase {
120  // "#include SK_REF_CNT_MIXIN_INCLUDE" doesn't work with this build system.
121  #if defined(SK_BUILD_FOR_GOOGLE3)
122  public:
123  void deref() const { this->unref(); }
124  #endif
125 };
126 #endif
127 
129 
/** Call obj->ref() and return obj. The obj must not be nullptr
 *  (asserted in debug builds).
 */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}
137 
/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
 *  Returns the argument unchanged (possibly nullptr).
 */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}
146 
/** Check if the argument is non-null, and if so, call obj->unref().
 *  A nullptr argument is a no-op.
 */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}
154 
156 
157 // This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
158 // There's only benefit to using this if the deriving class does not otherwise need a vtable.
159 template <typename Derived>
160 class SkNVRefCnt {
161 public:
162  SkNVRefCnt() : fRefCnt(1) {}
164  #ifdef SK_DEBUG
165  int rc = fRefCnt.load(std::memory_order_relaxed);
166  SkASSERTF(rc == 1, "NVRefCnt was %d", rc);
167  #endif
168  }
169 
170  // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
171  // - unique() needs acquire when it returns true, and no barrier if it returns false;
172  // - ref() doesn't need any barrier;
173  // - unref() needs a release barrier, and an acquire if it's going to call delete.
174 
175  bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
176  void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
177  void unref() const {
178  if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
179  // restore the 1 for our destructor's assert
180  SkDEBUGCODE(fRefCnt.store(1, std::memory_order_relaxed));
181  delete (const Derived*)this;
182  }
183  }
184  void deref() const { this->unref(); }
185 
186  // This must be used with caution. It is only valid to call this when 'threadIsolatedTestCnt'
187  // refs are known to be isolated to the current thread. That is, it is known that there are at
188  // least 'threadIsolatedTestCnt' refs for which no other thread may make a balancing unref()
189  // call. Assuming the contract is followed, if this returns false then no other thread has
190  // ownership of this. If it returns true then another thread *may* have ownership.
191  bool refCntGreaterThan(int32_t threadIsolatedTestCnt) const {
192  int cnt = fRefCnt.load(std::memory_order_acquire);
193  // If this fails then the above contract has been violated.
194  SkASSERT(cnt >= threadIsolatedTestCnt);
195  return cnt > threadIsolatedTestCnt;
196  }
197 
198 private:
199  mutable std::atomic<int32_t> fRefCnt;
200 
201  SkNVRefCnt(SkNVRefCnt&&) = delete;
202  SkNVRefCnt(const SkNVRefCnt&) = delete;
203  SkNVRefCnt& operator=(SkNVRefCnt&&) = delete;
204  SkNVRefCnt& operator=(const SkNVRefCnt&) = delete;
205 };
206 
208 
220 template <typename T> class SK_TRIVIAL_ABI sk_sp {
221 public:
222  using element_type = T;
223 
224  constexpr sk_sp() : fPtr(nullptr) {}
225  constexpr sk_sp(std::nullptr_t) : fPtr(nullptr) {}
226 
231  sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
232  template <typename U,
233  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
234  sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}
235 
241  sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
242  template <typename U,
243  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
244  sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}
245 
250  explicit sk_sp(T* obj) : fPtr(obj) {}
251 
255  ~sk_sp() {
256  SkSafeUnref(fPtr);
257  SkDEBUGCODE(fPtr = nullptr);
258  }
259 
260  sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }
261 
267  sk_sp<T>& operator=(const sk_sp<T>& that) {
268  if (this != &that) {
269  this->reset(SkSafeRef(that.get()));
270  }
271  return *this;
272  }
273  template <typename U,
274  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
275  sk_sp<T>& operator=(const sk_sp<U>& that) {
276  this->reset(SkSafeRef(that.get()));
277  return *this;
278  }
279 
286  this->reset(that.release());
287  return *this;
288  }
289  template <typename U,
290  typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
292  this->reset(that.release());
293  return *this;
294  }
295 
296  T& operator*() const {
297  SkASSERT(this->get() != nullptr);
298  return *this->get();
299  }
300 
301  explicit operator bool() const { return this->get() != nullptr; }
302 
303  T* get() const { return fPtr; }
304  T* operator->() const { return fPtr; }
305 
310  void reset(T* ptr = nullptr) {
311  // Calling fPtr->unref() may call this->~() or this->reset(T*).
312  // http://wg21.cmeerw.net/lwg/issue998
313  // http://wg21.cmeerw.net/lwg/issue2262
314  T* oldPtr = fPtr;
315  fPtr = ptr;
316  SkSafeUnref(oldPtr);
317  }
318 
324  [[nodiscard]] T* release() {
325  T* ptr = fPtr;
326  fPtr = nullptr;
327  return ptr;
328  }
329 
330  void swap(sk_sp<T>& that) /*noexcept*/ {
331  using std::swap;
332  swap(fPtr, that.fPtr);
333  }
334 
335  using sk_is_trivially_relocatable = std::true_type;
336 
337 private:
338  T* fPtr;
339 };
340 
341 template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
342  a.swap(b);
343 }
344 
345 template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
346  return a.get() == b.get();
347 }
348 template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
349  return !a;
350 }
351 template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
352  return !b;
353 }
354 
355 template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
356  return a.get() != b.get();
357 }
358 template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
359  return static_cast<bool>(a);
360 }
361 template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
362  return static_cast<bool>(b);
363 }
364 
365 template <typename C, typename CT, typename T>
366 auto operator<<(std::basic_ostream<C, CT>& os, const sk_sp<T>& sp) -> decltype(os << sp.get()) {
367  return os << sp.get();
368 }
369 
370 template <typename T, typename... Args>
371 sk_sp<T> sk_make_sp(Args&&... args) {
372  return sk_sp<T>(new T(std::forward<Args>(args)...));
373 }
374 
375 /*
376  * Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
377  *
378  * This is different than the semantics of the constructor for sk_sp, which just wraps the ptr,
379  * effectively "adopting" it.
380  */
381 template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
382  return sk_sp<T>(SkSafeRef(obj));
383 }
384 
385 template <typename T> sk_sp<T> sk_ref_sp(const T* obj) {
386  return sk_sp<T>(const_cast<T*>(SkSafeRef(obj)));
387 }
388 
389 #endif
static T * SkSafeRef(T *obj)
Check if the argument is non-null, and if so, call obj->ref() and return obj.
Definition: SkRefCnt.h:140
static T * SkRef(T *obj)
Call obj->ref() and return obj.
Definition: SkRefCnt.h:132
bool operator!=(const sk_sp< T > &a, const sk_sp< U > &b)
Definition: SkRefCnt.h:355
sk_sp< T > sk_ref_sp(T *obj)
Definition: SkRefCnt.h:381
static void SkSafeUnref(T *obj)
Check if the argument is non-null, and if so, call obj->unref()
Definition: SkRefCnt.h:149
sk_sp< T > sk_make_sp(Args &&... args)
Definition: SkRefCnt.h:371
auto operator<<(std::basic_ostream< C, CT > &os, const sk_sp< T > &sp) -> decltype(os << sp.get())
Definition: SkRefCnt.h:366
bool operator==(const sk_sp< T > &a, const sk_sp< U > &b)
Definition: SkRefCnt.h:345
void swap(sk_sp< T > &a, sk_sp< T > &b)
Definition: SkRefCnt.h:341
Definition: SkRefCnt.h:160
void unref() const
Definition: SkRefCnt.h:177
SkNVRefCnt()
Definition: SkRefCnt.h:162
void ref() const
Definition: SkRefCnt.h:176
bool unique() const
Definition: SkRefCnt.h:175
bool refCntGreaterThan(int32_t threadIsolatedTestCnt) const
Definition: SkRefCnt.h:191
void deref() const
Definition: SkRefCnt.h:184
~SkNVRefCnt()
Definition: SkRefCnt.h:163
SkRefCntBase is the base class for objects that may be shared by multiple objects.
Definition: SkRefCnt.h:31
SkRefCntBase()
Default construct, initializing the reference count to 1.
Definition: SkRefCnt.h:35
void ref() const
Increment the reference count.
Definition: SkRefCnt.h:62
virtual ~SkRefCntBase()
Destruct, asserting that the reference count is 1.
Definition: SkRefCnt.h:39
bool unique() const
May return true if the caller is the only owner.
Definition: SkRefCnt.h:50
void unref() const
Decrement the reference count.
Definition: SkRefCnt.h:72
Definition: SkRefCnt.h:119
Shared pointer class to wrap classes that support a ref()/unref() interface.
Definition: SkRefCnt.h:220
T & operator*() const
Definition: SkRefCnt.h:296
void swap(sk_sp< T > &that)
Definition: SkRefCnt.h:330
sk_sp(T *obj)
Adopt the bare pointer into the newly created sk_sp.
Definition: SkRefCnt.h:250
sk_sp< T > & operator=(sk_sp< U > &&that)
Definition: SkRefCnt.h:291
std::true_type sk_is_trivially_relocatable
Definition: SkRefCnt.h:335
sk_sp(sk_sp< U > &&that)
Definition: SkRefCnt.h:244
T * get() const
Definition: SkRefCnt.h:303
sk_sp(const sk_sp< U > &that)
Definition: SkRefCnt.h:234
T * operator->() const
Definition: SkRefCnt.h:304
T * release()
Return the bare pointer, and set the internal object pointer to nullptr.
Definition: SkRefCnt.h:324
sk_sp< T > & operator=(const sk_sp< U > &that)
Definition: SkRefCnt.h:275
sk_sp< T > & operator=(const sk_sp< T > &that)
Shares the underlying object referenced by the argument by calling ref() on it.
Definition: SkRefCnt.h:267
constexpr sk_sp(std::nullptr_t)
Definition: SkRefCnt.h:225
sk_sp< T > & operator=(std::nullptr_t)
Definition: SkRefCnt.h:260
constexpr sk_sp()
Definition: SkRefCnt.h:224
void reset(T *ptr=nullptr)
Adopt the new bare pointer, and call unref() on any previously held object (if not null).
Definition: SkRefCnt.h:310
sk_sp< T > & operator=(sk_sp< T > &&that)
Move the underlying object from the argument to the sk_sp.
Definition: SkRefCnt.h:285
T element_type
Definition: SkRefCnt.h:222
sk_sp(const sk_sp< T > &that)
Shares the underlying object by calling ref(), so that both the argument and the newly created sk_sp ...
Definition: SkRefCnt.h:231
sk_sp(sk_sp< T > &&that)
Move the underlying object from the argument to the newly created sk_sp.
Definition: SkRefCnt.h:241
~sk_sp()
Calls unref() on the underlying object pointer.
Definition: SkRefCnt.h:255