Skia 2D Graphics Library
SkRefCnt.h
1 /*
2  * Copyright 2006 The Android Open Source Project
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #ifndef SkRefCnt_DEFINED
9 #define SkRefCnt_DEFINED
10 
11 #include "../private/SkTLogic.h"
12 #include "SkTypes.h"
13 #include <atomic>
14 #include <functional>
15 #include <memory>
16 #include <type_traits>
17 #include <utility>
18 
19 #define SK_SUPPORT_TRANSITION_TO_SP_INTERFACES
20 
31 class SK_API SkRefCntBase : SkNoncopyable {
32 public:
35  SkRefCntBase() : fRefCnt(1) {}
36 
39  virtual ~SkRefCntBase() {
40 #ifdef SK_DEBUG
41  SkASSERTF(getRefCnt() == 1, "fRefCnt was %d", getRefCnt());
42  // illegal value, to catch us if we reuse after delete
43  fRefCnt.store(0, std::memory_order_relaxed);
44 #endif
45  }
46 
47 #ifdef SK_DEBUG
48 
49  int32_t getRefCnt() const {
50  return fRefCnt.load(std::memory_order_relaxed);
51  }
52 
53  void validate() const {
54  SkASSERT(getRefCnt() > 0);
55  }
56 #endif
57 
61  bool unique() const {
62  if (1 == fRefCnt.load(std::memory_order_acquire)) {
63  // The acquire barrier is only really needed if we return true. It
64  // prevents code conditioned on the result of unique() from running
65  // until previous owners are all totally done calling unref().
66  return true;
67  }
68  return false;
69  }
70 
73  void ref() const {
74 #ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
75  // Android employs some special subclasses that enable the fRefCnt to
76  // go to zero, but not below, prior to reusing the object. This breaks
77  // the use of unique() on such objects and as such should be removed
78  // once the Android code is fixed.
79  SkASSERT(getRefCnt() >= 0);
80 #else
81  SkASSERT(getRefCnt() > 0);
82 #endif
83  // No barrier required.
84  (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
85  }
86 
91  void unref() const {
92  SkASSERT(getRefCnt() > 0);
93  // A release here acts in place of all releases we "should" have been doing in ref().
94  if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
95  // Like unique(), the acquire is only needed on success, to make sure
96  // code in internal_dispose() doesn't happen before the decrement.
97  this->internal_dispose();
98  }
99  }
100 
101 protected:
107  void internal_dispose_restore_refcnt_to_1() const {
108  SkASSERT(0 == getRefCnt());
109  fRefCnt.store(1, std::memory_order_relaxed);
110  }
111 
112 private:
116  virtual void internal_dispose() const {
117  this->internal_dispose_restore_refcnt_to_1();
118  delete this;
119  }
120 
121  // The following friends are those which override internal_dispose()
122  // and conditionally call SkRefCnt::internal_dispose().
123  friend class SkWeakRefCnt;
124 
125  mutable std::atomic<int32_t> fRefCnt;
126 
127  typedef SkNoncopyable INHERITED;
128 };
129 
130 #ifdef SK_REF_CNT_MIXIN_INCLUDE
131 // It is the responsibility of the following include to define the type SkRefCnt.
132 // This SkRefCnt should normally derive from SkRefCntBase.
133 #include SK_REF_CNT_MIXIN_INCLUDE
134 #else
135 class SK_API SkRefCnt : public SkRefCntBase {
136  // "#include SK_REF_CNT_MIXIN_INCLUDE" doesn't work with this build system.
137  #if defined(GOOGLE3)
138  public:
139  void deref() const { this->unref(); }
140  #endif
141 };
142 #endif
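Usage sketch (illustrative only, not part of the header): a class typically derives from SkRefCnt and is then managed with explicit ref()/unref() calls. The type name BlobData below is hypothetical.

    class BlobData : public SkRefCnt {        // hypothetical example type
    public:
        explicit BlobData(int size) : fSize(size) {}
    private:
        int fSize;
    };

    void shareAndRelease() {
        BlobData* data = new BlobData(64);    // constructed with a ref count of 1
        data->ref();                          // a second owner takes a reference (count: 2)
        data->unref();                        // the second owner releases it (count: 1)
        data->unref();                        // count reaches 0; internal_dispose() deletes the object
    }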
143 
145 
150 #define SkRefCnt_SafeAssign(dst, src) \
151  do { \
152  if (src) src->ref(); \
153  if (dst) dst->unref(); \
154  dst = src; \
155  } while (0)
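A hedged sketch of how SkRefCnt_SafeAssign is commonly used: a setter that replaces one owned pointer with another. Because the macro refs the incoming pointer before unrefing the old one, self-assignment is safe. The Holder class reuses the hypothetical BlobData type from the sketch above.

    class Holder {                              // hypothetical owner of a ref-counted pointer
    public:
        void setData(BlobData* data) {
            SkRefCnt_SafeAssign(fData, data);   // ref the new value, unref the old, then assign
        }
        ~Holder() { SkSafeUnref(fData); }       // release whatever is still held
    private:
        BlobData* fData = nullptr;
    };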
156 
157 
160 template <typename T> static inline T* SkRef(T* obj) {
161  SkASSERT(obj);
162  obj->ref();
163  return obj;
164 }
165 
168 template <typename T> static inline T* SkSafeRef(T* obj) {
169  if (obj) {
170  obj->ref();
171  }
172  return obj;
173 }
174 
177 template <typename T> static inline void SkSafeUnref(T* obj) {
178  if (obj) {
179  obj->unref();
180  }
181 }
182 
183 template<typename T> static inline void SkSafeSetNull(T*& obj) {
184  if (obj) {
185  obj->unref();
186  obj = nullptr;
187  }
188 }
189 
191 
192 template <typename T> struct SkTUnref {
193  void operator()(T* t) { t->unref(); }
194 };
195 
199 template <typename T> class SkAutoTUnref : public std::unique_ptr<T, SkTUnref<T>> {
200 public:
201  explicit SkAutoTUnref(T* obj = nullptr) : std::unique_ptr<T, SkTUnref<T>>(obj) {}
202 
203  operator T*() const { return this->get(); }
204 
205 #if defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
206  // Need to update graphics/Shader.cpp.
207  T* detach() { return this->release(); }
208 #endif
209 };
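A brief sketch of SkAutoTUnref as a scoped holder (BlobData is the hypothetical type from earlier): the SkTUnref deleter calls unref() rather than delete when the holder goes out of scope.

    void scopedUse() {
        SkAutoTUnref<BlobData> data(new BlobData(16));  // adopts the caller's reference
        // data converts to BlobData* via operator T*(); get()/release()/reset() come from
        // the std::unique_ptr base. On scope exit, SkTUnref::operator() unrefs the object.
    }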
210 // Can't use the #define trick below to guard a bare SkAutoTUnref(...) because it's templated. :(
211 
212 class SkAutoUnref : public SkAutoTUnref<SkRefCnt> {
213 public:
214  SkAutoUnref(SkRefCnt* obj) : SkAutoTUnref<SkRefCnt>(obj) {}
215 };
216 #define SkAutoUnref(...) SK_REQUIRE_LOCAL_VAR(SkAutoUnref)
217 
218 // This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
219 // There's only benefit to using this if the deriving class does not otherwise need a vtable.
220 template <typename Derived>
221 class SkNVRefCnt : SkNoncopyable {
222 public:
223  SkNVRefCnt() : fRefCnt(1) {}
224  ~SkNVRefCnt() { SkASSERTF(1 == getRefCnt(), "NVRefCnt was %d", getRefCnt()); }
225 
226  // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
227  // - unique() needs acquire when it returns true, and no barrier if it returns false;
228  // - ref() doesn't need any barrier;
229  // - unref() needs a release barrier, and an acquire if it's going to call delete.
230 
231  bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
232  void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
233  void unref() const {
234  if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
235  // restore the 1 for our destructor's assert
236  SkDEBUGCODE(fRefCnt.store(1, std::memory_order_relaxed));
237  delete (const Derived*)this;
238  }
239  }
240  void deref() const { this->unref(); }
241 
242 private:
243  mutable std::atomic<int32_t> fRefCnt;
244  int32_t getRefCnt() const {
245  return fRefCnt.load(std::memory_order_relaxed);
246  }
247 };
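A sketch of the CRTP pattern SkNVRefCnt expects: the deriving class passes itself as the template argument so unref() can delete through the correct, non-virtual type. PackedThing is a hypothetical name.

    class PackedThing : public SkNVRefCnt<PackedThing> {  // hypothetical low-overhead type
    public:
        PackedThing() = default;
        // No vtable is needed, so the 4-byte ref count is the only overhead added here.
    };

    void nvExample() {
        PackedThing* t = new PackedThing;   // ref count starts at 1
        t->ref();                           // count: 2
        t->unref();                         // count: 1
        t->unref();                         // count: 0, deleted via (const PackedThing*)this
    }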
248 
250 
258 template <typename T> class sk_sp {
259  /** Supports safe bool idiom. */
260  using unspecified_bool_type = T* sk_sp::*;
261 public:
262  using element_type = T;
263 
264  constexpr sk_sp() : fPtr(nullptr) {}
265  constexpr sk_sp(std::nullptr_t) : fPtr(nullptr) {}
266 
271  sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
272  template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
273  sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}
274 
280  sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
281  template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
282  sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}
283 
288  explicit sk_sp(T* obj) : fPtr(obj) {}
289 
293  ~sk_sp() {
294  SkSafeUnref(fPtr);
295  SkDEBUGCODE(fPtr = nullptr);
296  }
297 
298  sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }
299 
305  sk_sp<T>& operator=(const sk_sp<T>& that) {
306  this->reset(SkSafeRef(that.get()));
307  return *this;
308  }
309  template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
310  sk_sp<T>& operator=(const sk_sp<U>& that) {
311  this->reset(SkSafeRef(that.get()));
312  return *this;
313  }
314 
320  sk_sp<T>& operator=(sk_sp<T>&& that) {
321  this->reset(that.release());
322  return *this;
323  }
324  template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
325  sk_sp<T>& operator=(sk_sp<U>&& that) {
326  this->reset(that.release());
327  return *this;
328  }
329 
330  T& operator*() const {
331  SkASSERT(this->get() != nullptr);
332  return *this->get();
333  }
334 
335  // MSVC 2013 does not work correctly with explicit operator bool.
336  // https://chromium-cpp.appspot.com/#core-blacklist
337  // When explicit operator bool can be used, remove operator! and operator unspecified_bool_type.
338  //explicit operator bool() const { return this->get() != nullptr; }
339  operator unspecified_bool_type() const { return this->get() ? &sk_sp::fPtr : nullptr; }
340  bool operator!() const { return this->get() == nullptr; }
341 
342  T* get() const { return fPtr; }
343  T* operator->() const { return fPtr; }
344 
349  void reset(T* ptr = nullptr) {
350  // Calling fPtr->unref() may call this->~() or this->reset(T*).
351  // http://wg21.cmeerw.net/lwg/issue998
352  // http://wg21.cmeerw.net/lwg/issue2262
353  T* oldPtr = fPtr;
354  fPtr = ptr;
355  SkSafeUnref(oldPtr);
356  }
357 
363  T* SK_WARN_UNUSED_RESULT release() {
364  T* ptr = fPtr;
365  fPtr = nullptr;
366  return ptr;
367  }
368 
369  void swap(sk_sp<T>& that) /*noexcept*/ {
370  using std::swap;
371  swap(fPtr, that.fPtr);
372  }
373 
374 private:
375  T* fPtr;
376 };
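A hedged sketch of sk_sp ownership semantics, again using the hypothetical BlobData type: the explicit constructor adopts an existing reference, copies share ownership via ref(), moves transfer ownership without touching the count, and reset()/release() adjust ownership explicitly.

    void spSemantics() {
        sk_sp<BlobData> a(new BlobData(8));  // adopts the newly created object (count: 1)
        sk_sp<BlobData> b = a;               // copy shares ownership via ref() (count: 2)
        sk_sp<BlobData> c = std::move(a);    // move takes a's reference; count stays 2
        b.reset();                           // b unrefs its object (count: 1)
        BlobData* raw = c.release();         // c gives up ownership without unrefing
        SkSafeUnref(raw);                    // the caller now owes the final unref
    }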
377 
378 template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
379  a.swap(b);
380 }
381 
382 template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
383  return a.get() == b.get();
384 }
385 template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
386  return !a;
387 }
388 template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
389  return !b;
390 }
391 
392 template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
393  return a.get() != b.get();
394 }
395 template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
396  return static_cast<bool>(a);
397 }
398 template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
399  return static_cast<bool>(b);
400 }
401 
402 template <typename T, typename U> inline bool operator<(const sk_sp<T>& a, const sk_sp<U>& b) {
403  // Provide defined total order on sk_sp.
404  // http://wg21.cmeerw.net/lwg/issue1297
405  // http://wg21.cmeerw.net/lwg/issue1401 .
406  return std::less<skstd::common_type_t<T*, U*>>()(a.get(), b.get());
407 }
408 template <typename T> inline bool operator<(const sk_sp<T>& a, std::nullptr_t) {
409  return std::less<T*>()(a.get(), nullptr);
410 }
411 template <typename T> inline bool operator<(std::nullptr_t, const sk_sp<T>& b) {
412  return std::less<T*>()(nullptr, b.get());
413 }
414 
415 template <typename T, typename U> inline bool operator<=(const sk_sp<T>& a, const sk_sp<U>& b) {
416  return !(b < a);
417 }
418 template <typename T> inline bool operator<=(const sk_sp<T>& a, std::nullptr_t) {
419  return !(nullptr < a);
420 }
421 template <typename T> inline bool operator<=(std::nullptr_t, const sk_sp<T>& b) {
422  return !(b < nullptr);
423 }
424 
425 template <typename T, typename U> inline bool operator>(const sk_sp<T>& a, const sk_sp<U>& b) {
426  return b < a;
427 }
428 template <typename T> inline bool operator>(const sk_sp<T>& a, std::nullptr_t) {
429  return nullptr < a;
430 }
431 template <typename T> inline bool operator>(std::nullptr_t, const sk_sp<T>& b) {
432  return b < nullptr;
433 }
434 
435 template <typename T, typename U> inline bool operator>=(const sk_sp<T>& a, const sk_sp<U>& b) {
436  return !(a < b);
437 }
438 template <typename T> inline bool operator>=(const sk_sp<T>& a, std::nullptr_t) {
439  return !(a < nullptr);
440 }
441 template <typename T> inline bool operator>=(std::nullptr_t, const sk_sp<T>& b) {
442  return !(nullptr < b);
443 }
444 
445 template <typename T, typename... Args>
446 sk_sp<T> sk_make_sp(Args&&... args) {
447  return sk_sp<T>(new T(std::forward<Args>(args)...));
448 }
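A short sketch: sk_make_sp forwards its arguments to T's constructor, so the initial reference is adopted by the returned sk_sp rather than managed by hand (BlobData is the hypothetical type from above).

    void makeExample() {
        sk_sp<BlobData> blob = sk_make_sp<BlobData>(32);  // same as sk_sp<BlobData>(new BlobData(32))
    }   // blob's destructor unrefs, destroying the object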
449 
450 #ifdef SK_SUPPORT_TRANSITION_TO_SP_INTERFACES
451 
452 /*
453  * Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
454  *
455  * This is different than the semantics of the constructor for sk_sp, which just wraps the ptr,
456  * effectively "adopting" it.
457  *
458  * This function may be helpful while we convert callers from ptr-based to sk_sp-based parameters.
459  */
460 template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
461  return sk_sp<T>(SkSafeRef(obj));
462 }
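A sketch contrasting sk_ref_sp with the adopting constructor, per the comment above: sk_ref_sp calls ref() so the caller's reference stays valid, while sk_sp<T>(ptr) adopts the caller's existing reference. BlobData remains the hypothetical type from the earlier sketches.

    void adoptVersusRef(BlobData* borrowed) {
        sk_sp<BlobData> shared = sk_ref_sp(borrowed);   // refs: the caller keeps its own reference
        sk_sp<BlobData> owned(new BlobData(4));         // adopts: no extra ref is taken
    }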
463 
464 #endif
465 
466 #endif
Member documentation (Doxygen annotations):

SkRefCntBase - SkRefCntBase is the base class for objects that may be shared by multiple objects. (SkRefCnt.h:31)
  SkRefCntBase() - Default construct, initializing the reference count to 1. (SkRefCnt.h:35)
  virtual ~SkRefCntBase() - Destruct, asserting that the reference count is 1. (SkRefCnt.h:39)
  int32_t getRefCnt() const - Return the reference count. (SkRefCnt.h:49)
  bool unique() const - May return true if the caller is the only owner. (SkRefCnt.h:61)
  void ref() const - Increment the reference count. (SkRefCnt.h:73)
  void unref() const - Decrement the reference count. (SkRefCnt.h:91)
  void internal_dispose_restore_refcnt_to_1() const - Allow subclasses to call this if they've overridden internal_dispose so they can reset fRefCnt before... (SkRefCnt.h:107)
  virtual void internal_dispose() const - Called when the ref count goes to 0. (SkRefCnt.h:116)

SkRefCnt (SkRefCnt.h:135)
SkTUnref (SkRefCnt.h:192)
SkAutoTUnref - Utility class that simply unref's its argument in the destructor. (SkRefCnt.h:199)
SkAutoUnref (SkRefCnt.h:212)
SkNVRefCnt (SkRefCnt.h:221)

sk_sp - Shared pointer class to wrap classes that support a ref()/unref() interface. (SkRefCnt.h:258)
  unspecified_bool_type - Supports safe bool idiom. (SkRefCnt.h:260)
  sk_sp(const sk_sp<T>& that) - Shares the underlying object by calling ref(), so that both the argument and the newly created sk_sp... (SkRefCnt.h:271)
  sk_sp(sk_sp<T>&& that) - Move the underlying object from the argument to the newly created sk_sp. (SkRefCnt.h:280)
  sk_sp(T* obj) - Adopt the bare pointer into the newly created sk_sp. (SkRefCnt.h:288)
  ~sk_sp() - Calls unref() on the underlying object pointer. (SkRefCnt.h:293)
  sk_sp<T>& operator=(const sk_sp<T>& that) - Shares the underlying object referenced by the argument by calling ref() on it. (SkRefCnt.h:305)
  sk_sp<T>& operator=(sk_sp<T>&& that) - Move the underlying object from the argument to the sk_sp. (SkRefCnt.h:320)
  void reset(T* ptr = nullptr) - Adopt the new bare pointer, and call unref() on any previously held object (if not null)... (SkRefCnt.h:349)
  T* SK_WARN_UNUSED_RESULT release() - Return the bare pointer, and set the internal object pointer to nullptr. (SkRefCnt.h:363)