6 #ifndef XENIUM_LOCK_FREE_REF_COUNT_HPP
7 #define XENIUM_LOCK_FREE_REF_COUNT_HPP
9 #include <xenium/reclamation/detail/concurrent_ptr.hpp>
10 #include <xenium/reclamation/detail/guard_ptr.hpp>
11 #include <xenium/reclamation/detail/allocation_tracker.hpp>
13 #include <xenium/acquire_guard.hpp>
14 #include <xenium/parameter.hpp>
// NOTE(review): this region is garbled by extraction — original line numbers are
// fused into the code text and several lines are missing (the template header
// preceding the InsertPadding/ThreadLocalFreeListSize parameters, the struct
// body, and closing braces). Kept byte-identical; comments only.
48 namespace reclamation {
// Policy parameters of the traits template:
//  - InsertPadding: when true, the per-node header is padded (see padded_header
//    below) — presumably to avoid false sharing; TODO confirm against docs.
//  - ThreadLocalFreeListSize: size of a per-thread free list; 0 presumably
//    disables it — TODO confirm in the impl header.
50 bool InsertPadding =
false,
51 std::size_t ThreadLocalFreeListSize = 0
// Traits type configuring the lock_free_ref_count reclamation scheme.
53 struct lock_free_ref_count_traits {
// Rebind helper: yields a traits instantiation with the given policies applied.
57 template <
class... Policies>
58 using with = lock_free_ref_count_traits<
// NOTE(review): extraction artifacts here — fused line numbers, and the class-head
// lines that these template headers introduce are missing (e.g. the line declaring
// `class lock_free_ref_count` after the first header, and the declarations
// following the next two headers). Kept byte-identical; comments only.
// Template header for the reclamation scheme class itself (class line missing).
81 template <
class Traits = lock_free_ref_count_traits<>>
// Template header for a nested guard_ptr declaration (declaration line missing).
84 template <
class T,
class MarkedPtr>
// Template header whose declaration line is missing from this view.
88 template <
class... Policies>
// Template header, presumably for a concurrent_ptr alias (alias line missing).
91 template <
class T, std::
size_t N = T::number_of_mark_bits>
// Forward declaration: mixin base enabling objects of type T to be managed by
// this reclamation scheme, with N mark bits and a (constrained) deleter.
94 template <
class T, std::
size_t N = 0,
class DeleterT = std::default_delete<T>>
95 class enable_concurrent_ptr;
// Empty region guard: this scheme needs no critical-region bookkeeping.
97 class region_guard {};
// The reference counter packs a "claim" flag into bit 0, so the count itself
// occupies the remaining bits: incrementing/decrementing the count means
// adding/subtracting RefCountInc (2), and RefCountClaimBit (1) marks the node
// as claimed for reclamation. (See refs(), which shifts the raw value right
// by one to recover the count.)
static constexpr unsigned RefCountInc = 2;
static constexpr unsigned RefCountClaimBit = 1;
// Optional allocation statistics, compiled in only under TRACK_ALLOCATIONS.
// NOTE(review): the matching #endif is missing from this view (extraction gap),
// and original line numbers are fused into the text. Kept byte-identical.
104 ALLOCATION_TRACKING_FUNCTIONS;
105 #ifdef TRACK_ALLOCATIONS
// Per-thread counter registered with the global tracker for this scheme.
106 inline static thread_local detail::registered_allocation_counter<lock_free_ref_count> allocation_counter_;
// Accessor for the calling thread's counter (defined in the impl header).
107 static detail::allocation_counter& allocation_counter();
// Out-of-class definition of lock_free_ref_count<Traits>::enable_concurrent_ptr.
// NOTE(review): the class-head line itself (naming the class and its first base,
// presumably `public T`) and the opening brace are missing from this view;
// only the template headers and the second base clause remain. Byte-identical.
111 template <
class Traits>
112 template <
class T, std::
size_t N,
class DeleterT>
// Second base: ties instances into the scheme's allocation tracking.
114 private detail::tracked_object<lock_free_ref_count>
// Special member functions.
// NOTE(review): the constructor's and destructor's braces are missing from this
// view (extraction gap) and original line numbers are fused in. Byte-identical.
// Default ctor: clears the header's `destroyed` flag — relaxed order suffices
// since this runs before the object is published.
117 enable_concurrent_ptr() noexcept
119 destroyed().store(
false, std::memory_order_relaxed);
// Non-copyable and non-movable: the object's identity is tied to its
// out-of-band header (see getHeader()).
121 enable_concurrent_ptr(
const enable_concurrent_ptr&) noexcept =
delete;
122 enable_concurrent_ptr(enable_concurrent_ptr&&) noexcept = delete;
123 enable_concurrent_ptr& operator=(const enable_concurrent_ptr&) noexcept = delete;
124 enable_concurrent_ptr& operator=(enable_concurrent_ptr&&) noexcept = delete;
// Virtual dtor (safe delete through base); asserts against double destruction
// and records that the object has been destroyed (nodes may sit on a free list
// after destruction — see push_to_free_list()).
125 virtual ~enable_concurrent_ptr() noexcept
127 assert(!is_destroyed());
128 destroyed().store(
true, std::memory_order_relaxed);
131 using Deleter = DeleterT;
132 static_assert(std::is_same<Deleter, std::default_delete<T>>::value,
133 "lock_free_ref_count reclamation can only be used with std::default_delete as Deleter.");
135 static constexpr std::size_t number_of_mark_bits = N;
136 unsigned refs()
const {
return getHeader()->ref_count.load(std::memory_order_relaxed) >> 1; }
// Class-specific allocation, defined in the impl header. Presumably reserves
// space for a `header` in front of the object — getHeader() relies on that
// layout — and may recycle nodes from global_free_list; TODO confirm in impl.
void* operator new(size_t sz);
void operator delete(void* p);
142 bool decrement_refcnt();
143 bool is_destroyed()
const {
return getHeader()->destroyed.load(std::memory_order_relaxed); }
144 void push_to_free_list() { global_free_list.push(
static_cast<T*
>(
this)); }
146 struct unpadded_header {
147 std::atomic<unsigned> ref_count;
148 std::atomic<bool> destroyed;
149 concurrent_ptr<T, N> next_free;
151 struct padded_header : unpadded_header {
152 char padding[64 -
sizeof(unpadded_header)];
154 using header = std::conditional_t<Traits::insert_padding, padded_header, unpadded_header>;
155 header* getHeader() {
return static_cast<header*
>(
static_cast<void*
>(
this)) - 1; }
156 const header* getHeader()
const {
return static_cast<const header*
>(
static_cast<const void*
>(
this)) - 1; }
158 std::atomic<unsigned>& ref_count() {
return getHeader()->ref_count; }
159 std::atomic<bool>& destroyed() {
return getHeader()->destroyed; }
160 concurrent_ptr<T, N>& next_free() {
return getHeader()->next_free; }
// NOTE(review): extraction gaps here — lines between these declarations are
// missing (likely access specifiers and the free_list type/alias declaration),
// and original line numbers are fused into the text. Kept byte-identical.
// The reclamation scheme needs access to the private header machinery.
162 friend class lock_free_ref_count;
// Guarded/marked pointer types as defined by the matching concurrent_ptr.
164 using guard_ptr =
typename concurrent_ptr<T, N>::guard_ptr;
165 using marked_ptr =
typename concurrent_ptr<T, N>::marked_ptr;
// Shared free list of destroyed-but-not-deallocated nodes (see
// push_to_free_list()); one instance per enable_concurrent_ptr instantiation.
168 static free_list global_free_list;
// Out-of-class definition of lock_free_ref_count<Traits>::guard_ptr — an RAII
// handle that holds a reference on the pointee while it is in scope.
// NOTE(review): the class body's braces and access specifiers are missing from
// this view (extraction gap); original line numbers are fused in. All member
// functions are declared here and defined in the impl header. Byte-identical.
171 template <
class Traits>
172 template <
class T,
class MarkedPtr>
173 class lock_free_ref_count<Traits>::guard_ptr :
174 public detail::guard_ptr<T, MarkedPtr, guard_ptr<T, MarkedPtr>>
176 using base = detail::guard_ptr<T, MarkedPtr, guard_ptr>;
177 using Deleter =
typename T::Deleter;
// enable_concurrent_ptr (any instantiation) may reach into the guard.
179 template <
class, std::
size_t,
class>
180 friend class enable_concurrent_ptr;
// Guard a marked ptr; explicit to avoid accidental guarding.
183 explicit guard_ptr(
const MarkedPtr& p = MarkedPtr()) noexcept;
// Copy/move: copying presumably acquires an additional reference — confirm in impl.
184 guard_ptr(const guard_ptr& p) noexcept;
185 guard_ptr(guard_ptr&& p) noexcept;
187 guard_ptr& operator=(const guard_ptr& p);
188 guard_ptr& operator=(guard_ptr&& p) noexcept;
// Acquire a safe reference to the current value of p.
191 void acquire(const concurrent_ptr<T>& p, std::memory_order order = std::memory_order_seq_cst) noexcept;
// Like acquire, but only if p currently equals `expected`; returns success.
194 bool acquire_if_equal(const concurrent_ptr<T>& p,
195 const MarkedPtr& expected,
196 std::memory_order order = std::memory_order_seq_cst) noexcept;
// Release the guard's reference (the object may be reclaimed afterwards).
199 void reset() noexcept;
// Mark the object for deletion; reclaimed once all references are gone.
202 void reclaim(Deleter d = Deleter()) noexcept;
206 #define LOCK_FREE_REF_COUNT_IMPL
207 #include <xenium/reclamation/impl/lock_free_ref_count.hpp>
208 #undef LOCK_FREE_REF_COUNT_IMPL