//  lock-free freelist
//
//  Copyright (C) 2008-2016 Tim Blechmann
//
//  Distributed under the Boost Software License, Version 1.0. (See
//  accompanying file LICENSE_1_0.txt or copy at
//  http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED

#include <cstddef>
#include <limits>
#include <memory>
#include <stdexcept>

#include <boost/array.hpp>
#include <boost/config.hpp>
#include <boost/cstdint.hpp>
#include <boost/mpl/if.hpp>
#include <boost/noncopyable.hpp>
#include <boost/static_assert.hpp>
#include <boost/throw_exception.hpp>

#include <boost/align/align_up.hpp>
#include <boost/align/aligned_allocator_adaptor.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>

#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable: 4100) // unreferenced formal parameter
#pragma warning(disable: 4127) // conditional expression is constant
#endif

namespace boost    {
namespace lockfree {
namespace detail   {
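
// This header implements the two freelist flavours used by the lock-free
// containers: freelist_stack, a node-based pool built on a tagged-pointer
// Treiber stack (defined first below), and fixed_size_freelist, an
// array-based pool addressed by 16-bit indices.  Both avoid the ABA problem
// by pairing the freelist head with a version tag that is advanced on every
// successful pop.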

template <typename T,
          typename Alloc = std::allocator<T>
         >
class freelist_stack:
    Alloc
{
    struct freelist_node
    {
        tagged_ptr<freelist_node> next;
    };

    typedef tagged_ptr<freelist_node> tagged_node_ptr;

public:
    typedef tagged_ptr<T> tagged_node_handle;

    template <typename Allocator>
    freelist_stack (Allocator const & alloc, std::size_t n = 0):
        Alloc(alloc),
        pool_(tagged_node_ptr(NULL))
    {
        for (std::size_t i = 0; i != n; ++i) {
            T * node = Alloc::allocate(1);
#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(node);
#else
            deallocate<false>(node);
#endif
        }
    }

    template <bool ThreadSafe>
    void reserve (std::size_t count)
    {
        for (std::size_t i = 0; i != count; ++i) {
            T * node = Alloc::allocate(1);
            deallocate<ThreadSafe>(node);
        }
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_ptr)
    {
        T * n = tagged_ptr.get_ptr();
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    ~freelist_stack(void)
    {
        tagged_node_ptr current = pool_.load();

        while (current) {
            freelist_node * current_ptr = current.get_ptr();
            if (current_ptr)
                current = current_ptr->next;
            Alloc::deallocate((T*)current_ptr, 1);
        }
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    T * get_handle(T * pointer) const
    {
        return pointer;
    }

    T * get_handle(tagged_node_handle const & handle) const
    {
        return get_pointer(handle);
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return tptr.get_ptr();
    }

    T * get_pointer(T * pointer) const
    {
        return pointer;
    }

    T * null_handle(void) const
    {
        return NULL;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe, bool Bounded>
    T * allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl<Bounded>();
        else
            return allocate_impl_unsafe<Bounded>();
    }
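
    // The allocate_impl / allocate_impl_unsafe pair below pops a node from
    // the freelist.  The thread-safe version loops on compare_exchange_weak:
    // the replacement head carries get_next_tag(), so a node that was popped
    // and pushed back concurrently cannot be mistaken for an unchanged head
    // (ABA).  When the pool is empty, an unbounded (!Bounded) freelist falls
    // back to the allocator, while a bounded one signals exhaustion by
    // returning 0.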

    template <bool Bounded>
    T * allocate_impl (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);

        for(;;) {
            if (!old_pool.get_ptr()) {
                if (!Bounded)
                    return Alloc::allocate(1);
                else
                    return 0;
            }

            freelist_node * new_pool_ptr = old_pool->next.get_ptr();
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool)) {
                void * ptr = old_pool.get_ptr();
                return reinterpret_cast<T*>(ptr);
            }
        }
    }

    template <bool Bounded>
    T * allocate_impl_unsafe (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);

        if (!old_pool.get_ptr()) {
            if (!Bounded)
                return Alloc::allocate(1);
            else
                return 0;
        }

        freelist_node * new_pool_ptr = old_pool->next.get_ptr();
        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        void * ptr = old_pool.get_ptr();
        return reinterpret_cast<T*>(ptr);
    }

    template <bool ThreadSafe>
    void deallocate (T * n)
    {
        if (ThreadSafe)
            deallocate_impl(n);
        else
            deallocate_impl_unsafe(n);
    }
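
    // The deallocate_impl variants below push a node back: link the node to
    // the current head, then try to swing the head to the node, retrying on
    // CAS failure.  The tag is preserved here; it is only advanced on the
    // pop side.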

    void deallocate_impl (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        for(;;) {
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
            new_pool->next.set_ptr(old_pool.get_ptr());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
        new_pool->next.set_ptr(old_pool.get_ptr());

        pool_.store(new_pool, memory_order_relaxed);
    }

    atomic<tagged_node_ptr> pool_;
};
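
// A minimal usage sketch for freelist_stack.  This is an internal detail
// normally driven by the public boost::lockfree containers; the node type
// `item` and the values used here are illustrative only:
//
//     struct item { int payload; explicit item(int p): payload(p) {} };
//
//     boost::lockfree::detail::freelist_stack<item> pool(std::allocator<item>(), 16);
//     item * p = pool.construct<true, false>(42);   // thread-safe, unbounded
//     if (p)
//         pool.destruct<true>(p);                   // runs ~item() and recycles the node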

class
BOOST_ALIGNMENT( 4 ) // workaround for bugs in MSVC
tagged_index
{
public:
    typedef boost::uint16_t tag_t;
    typedef boost::uint16_t index_t;

    /** uninitialized constructor */
    tagged_index(void) BOOST_NOEXCEPT //: index(0), tag(0)
    {}

    /** copy constructor */
#ifdef BOOST_NO_CXX11_DEFAULTED_FUNCTIONS
    tagged_index(tagged_index const & rhs):
        index(rhs.index), tag(rhs.tag)
    {}
#else
    tagged_index(tagged_index const & rhs) = default;
#endif

    explicit tagged_index(index_t i, tag_t t = 0):
        index(i), tag(t)
    {}

    index_t get_index() const
    {
        return index;
    }

    void set_index(index_t i)
    {
        index = i;
    }

    tag_t get_tag() const
    {
        return tag;
    }

    tag_t get_next_tag() const
    {
        tag_t next = (get_tag() + 1u) & (std::numeric_limits<tag_t>::max)();
        return next;
    }

    void set_tag(tag_t t)
    {
        tag = t;
    }

    bool operator==(tagged_index const & rhs) const
    {
        return (index == rhs.index) && (tag == rhs.tag);
    }

    bool operator!=(tagged_index const & rhs) const
    {
        return !operator==(rhs);
    }

protected:
    index_t index;
    tag_t tag;
};
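
// tagged_index packs a 16-bit array index and a 16-bit modification tag into
// 32 bits, so atomic<tagged_index> can typically be a single lock-free word.
// get_next_tag() wraps modulo 65536 (a tag of 65535 rolls over to 0), and the
// 16-bit index is why the array-based freelists below are limited to 65535
// nodes.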

template <typename T,
          std::size_t size
         >
struct compiletime_sized_freelist_storage
{
    // array-based freelists only support a 16bit address space.
    BOOST_STATIC_ASSERT(size < 65536);

    boost::array<char, size * sizeof(T) + 64> data;

    // unused ... only for API purposes
    template <typename Allocator>
    compiletime_sized_freelist_storage(Allocator const & /* alloc */, std::size_t /* count */)
    {}

    T * nodes(void) const
    {
        char * data_pointer = const_cast<char*>(data.data());
        return reinterpret_cast<T*>( boost::alignment::align_up( data_pointer, BOOST_LOCKFREE_CACHELINE_BYTES ) );
    }

    std::size_t node_count(void) const
    {
        return size;
    }
};
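
// The raw char array above is over-allocated by 64 bytes so that nodes() can
// shift the first node up to a BOOST_LOCKFREE_CACHELINE_BYTES boundary
// without running past the end of the buffer (this assumes the cacheline
// constant is at most 64 bytes, which holds for its usual definition).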

template <typename T,
          typename Alloc = std::allocator<T> >
struct runtime_sized_freelist_storage:
    boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES >
{
    typedef boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES > allocator_type;
    T * nodes_;
    std::size_t node_count_;

    template <typename Allocator>
    runtime_sized_freelist_storage(Allocator const & alloc, std::size_t count):
        allocator_type(alloc), node_count_(count)
    {
        if (count > 65535)
            boost::throw_exception(std::runtime_error("boost.lockfree: freelist size is limited to a maximum of 65535 objects"));
        nodes_ = allocator_type::allocate(count);
    }

    ~runtime_sized_freelist_storage(void)
    {
        allocator_type::deallocate(nodes_, node_count_);
    }

    T * nodes(void) const
    {
        return nodes_;
    }

    std::size_t node_count(void) const
    {
        return node_count_;
    }
};
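
// The runtime-sized storage allocates its node array through an
// aligned_allocator_adaptor so the first node starts on a cacheline
// boundary; the 65535-object cap mirrors the 16-bit index_t of tagged_index.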

template <typename T,
          typename NodeStorage = runtime_sized_freelist_storage<T>
         >
class fixed_size_freelist:
    NodeStorage
{
    struct freelist_node
    {
        tagged_index next;
    };

    void initialize(void)
    {
        T * nodes = NodeStorage::nodes();
        for (std::size_t i = 0; i != NodeStorage::node_count(); ++i) {
            tagged_index * next_index = reinterpret_cast<tagged_index*>(nodes + i);
            next_index->set_index(null_handle());

#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(nodes + i);
#else
            deallocate<false>(static_cast<index_t>(i));
#endif
        }
    }

public:
    typedef tagged_index tagged_node_handle;
    typedef tagged_index::index_t index_t;

    template <typename Allocator>
    fixed_size_freelist (Allocator const & alloc, std::size_t count):
        NodeStorage(alloc, count),
        pool_(tagged_index(static_cast<index_t>(count), 0))
    {
        initialize();
    }

    fixed_size_freelist (void):
        pool_(tagged_index(NodeStorage::node_count(), 0))
    {
        initialize();
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_index)
    {
        index_t index = tagged_index.get_index();
        T * n = NodeStorage::nodes() + index;
        (void)n; // silence msvc warning
        n->~T();
        deallocate<ThreadSafe>(index);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(static_cast<index_t>(n - NodeStorage::nodes()));
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    index_t null_handle(void) const
    {
        return static_cast<index_t>(NodeStorage::node_count());
    }

    index_t get_handle(T * pointer) const
    {
        if (pointer == NULL)
            return null_handle();
        else
            return static_cast<index_t>(pointer - NodeStorage::nodes());
    }

    index_t get_handle(tagged_node_handle const & handle) const
    {
        return handle.get_index();
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return get_pointer(tptr.get_index());
    }

    T * get_pointer(index_t index) const
    {
        if (index == null_handle())
            return 0;
        else
            return NodeStorage::nodes() + index;
    }

    T * get_pointer(T * ptr) const
    {
        return ptr;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe>
    index_t allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl();
        else
            return allocate_impl_unsafe();
    }
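
    // The index-based pop below mirrors freelist_stack::allocate_impl: the
    // first bytes of every free slot are reinterpreted as a tagged_index
    // naming the next free slot, null_handle() (== node_count()) marks an
    // empty pool, and the CAS installs the successor index together with a
    // bumped tag.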

    index_t allocate_impl (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            index_t index = old_pool.get_index();
            if (index == null_handle())
                return index;

            T * old_node = NodeStorage::nodes() + index;
            tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

            tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return old_pool.get_index();
        }
    }

    index_t allocate_impl_unsafe (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        index_t index = old_pool.get_index();
        if (index == null_handle())
            return index;

        T * old_node = NodeStorage::nodes() + index;
        tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

        tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        return old_pool.get_index();
    }

    template <bool ThreadSafe>
    void deallocate (index_t index)
    {
        if (ThreadSafe)
            deallocate_impl(index);
        else
            deallocate_impl_unsafe(index);
    }
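
    // Returning a slot writes the current head index into the slot's
    // embedded tagged_index and then swings the head to that slot, looping
    // on CAS failure just like the pointer-based freelist.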

    void deallocate_impl (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            tagged_index new_pool (index, old_pool.get_tag());
            new_pool_node->next.set_index(old_pool.get_index());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        tagged_index new_pool (index, old_pool.get_tag());
        new_pool_node->next.set_index(old_pool.get_index());

        pool_.store(new_pool);
    }

    atomic<tagged_index> pool_;
};
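
// A minimal usage sketch for fixed_size_freelist with compile-time storage.
// Again an internal detail; the node type `item` is illustrative only.  Each
// free slot is reused as a tagged_index, so the node type should be at least
// sizeof(tagged_index) bytes:
//
//     struct item { int payload; };
//
//     typedef fixed_size_freelist<
//         item,
//         compiletime_sized_freelist_storage<item, 128>
//     > pool_t;
//
//     pool_t pool(std::allocator<item>(), 128);
//     item * p = pool.construct<true, true>();   // Bounded is accepted for interface
//     if (p)                                     // compatibility; an array-based pool
//         pool.destruct<true>(p);                // is always bounded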

template <typename T,
          typename Alloc,
          bool IsCompileTimeSized,
          bool IsFixedSize,
          std::size_t Capacity
          >
struct select_freelist
{
    typedef typename mpl::if_c<IsCompileTimeSized,
                               compiletime_sized_freelist_storage<T, Capacity>,
                               runtime_sized_freelist_storage<T, Alloc>
                              >::type fixed_sized_storage_type;

    typedef typename mpl::if_c<IsCompileTimeSized || IsFixedSize,
                               fixed_size_freelist<T, fixed_sized_storage_type>,
                               freelist_stack<T, Alloc>
                              >::type type;
};

template <typename T, bool IsNodeBased>
struct select_tagged_handle
{
    typedef typename mpl::if_c<IsNodeBased,
                               tagged_ptr<T>,
                               tagged_index
                              >::type tagged_handle_type;

    typedef typename mpl::if_c<IsNodeBased,
                               T*,
                               typename tagged_index::index_t
                              >::type handle_type;
};
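
// These metafunctions let the container implementations pick their pool and
// handle types from the capacity<> / fixed_sized<> policies: a compile-time
// capacity or fixed_sized<true> selects the array-based fixed_size_freelist
// (indices as handles), otherwise the node-based freelist_stack (pointers as
// handles) is used.  A hypothetical instantiation for illustration:
//
//     typedef select_freelist<int, std::allocator<int>,
//                             true,   // compile-time sized
//                             false,  // fixed_sized<> not set
//                             128     // capacity<128>
//                            >::type pool_t;     // fixed_size_freelist<...>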

} /* namespace detail */
} /* namespace lockfree */
} /* namespace boost */

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

#endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */