//  Copyright (C) 2008-2016 Tim Blechmann
//
//  Distributed under the Boost Software License, Version 1.0. (See
//  accompanying file LICENSE_1_0.txt or copy at
//  http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#include <cstring>
#include <limits>
#include <memory>
#include <new>
#include <stdexcept>

#include <boost/array.hpp>
#include <boost/config.hpp>
#include <boost/cstdint.hpp>
#include <boost/mpl/if.hpp>
#include <boost/noncopyable.hpp>
#include <boost/static_assert.hpp>
#include <boost/throw_exception.hpp>

#include <boost/align/align_up.hpp>
#include <boost/align/aligned_allocator_adaptor.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>
#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable: 4100) // unreferenced formal parameter
#pragma warning(disable: 4127) // conditional expression is constant
#endif

namespace boost    {
namespace lockfree {
namespace detail   {
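// This header implements the node pools used by the boost.lockfree containers:
// a lock-free, pointer-based freelist (freelist_stack), an index-based
// fixed-size freelist (fixed_size_freelist) with its two storage policies, and
// the tagged_index helper that pairs a 16-bit node index with a 16-bit
// ABA-prevention tag.
//
// Illustrative use (the containers use these pools internally; this sketch is
// only meant to document the API):
//
//   freelist_stack<int> pool(std::allocator<int>(), 16); // pre-allocate 16 nodes
//   int * p = pool.construct<true, false>(42);           // ThreadSafe=true, Bounded=false
//   pool.destruct<true>(p);                              // run ~T() and recycle the node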
template <typename T,
          typename Alloc = std::allocator<T>
         >
class freelist_stack:
    Alloc
{
    struct freelist_node
    {
        tagged_ptr<freelist_node> next;
    };

    typedef tagged_ptr<freelist_node> tagged_node_ptr;

public:
    typedef tagged_ptr<T> tagged_node_handle;

    template <typename Allocator>
    freelist_stack (Allocator const & alloc, std::size_t n = 0):
        Alloc(alloc),
        pool_(tagged_node_ptr(NULL))
    {
        for (std::size_t i = 0; i != n; ++i) {
            T * node = Alloc::allocate(1);
            std::memset(node, 0, sizeof(T));
#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(node);
#else
            deallocate<false>(node);
#endif
        }
    }

    template <bool ThreadSafe>
    void reserve (std::size_t count)
    {
        for (std::size_t i = 0; i != count; ++i) {
            T * node = Alloc::allocate(1);
            std::memset(node, 0, sizeof(T));
            deallocate<ThreadSafe>(node);
        }
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle const & tagged_ptr)
    {
        T * n = tagged_ptr.get_ptr();
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    ~freelist_stack(void)
    {
        tagged_node_ptr current = pool_.load();
        while (current) {
            freelist_node * current_ptr = current.get_ptr();
            if (current_ptr)
                current = current_ptr->next;
            Alloc::deallocate((T*)current_ptr, 1);
        }
    }

    bool is_lock_free(void) const                            { return pool_.is_lock_free(); }

    T * get_handle(T * pointer) const                        { return pointer; }
    T * get_handle(tagged_node_handle const & handle) const  { return get_pointer(handle); }
    T * get_pointer(tagged_node_handle const & tptr) const   { return tptr.get_ptr(); }
    T * get_pointer(T * pointer) const                       { return pointer; }
    T * null_handle(void) const                              { return NULL; }

protected: // allow use from subclasses
    template <bool ThreadSafe, bool Bounded>
    T * allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl<Bounded>();
        else
            return allocate_impl_unsafe<Bounded>();
    }

    template <bool Bounded>
    T * allocate_impl (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);
        for(;;) {
            if (!old_pool.get_ptr()) {
                if (!Bounded) {
                    T * ptr = Alloc::allocate(1);
                    std::memset(ptr, 0, sizeof(T));
                    return ptr;
                } else
                    return 0;
            }
            freelist_node * new_pool_ptr = old_pool->next.get_ptr();
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());
            if (pool_.compare_exchange_weak(old_pool, new_pool)) {
                void * ptr = old_pool.get_ptr();
                return reinterpret_cast<T*>(ptr);
            }
        }
    }

    template <bool Bounded>
    T * allocate_impl_unsafe (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);
        if (!old_pool.get_ptr()) {
            if (!Bounded) {
                T * ptr = Alloc::allocate(1);
                std::memset(ptr, 0, sizeof(T));
                return ptr;
            } else
                return 0;
        }
        freelist_node * new_pool_ptr = old_pool->next.get_ptr();
        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());
        pool_.store(new_pool, memory_order_relaxed);
        void * ptr = old_pool.get_ptr();
        return reinterpret_cast<T*>(ptr);
    }

    template <bool ThreadSafe>
    void deallocate (T * n)
    {
        if (ThreadSafe)
            deallocate_impl(n);
        else
            deallocate_impl_unsafe(n);
    }

    void deallocate_impl (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);
        for(;;) {
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
            new_pool->next.set_ptr(old_pool.get_ptr());
            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);
        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
        new_pool->next.set_ptr(old_pool.get_ptr());
        pool_.store(new_pool, memory_order_relaxed);
    }

    atomic<tagged_node_ptr> pool_;
};
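// tagged_index packs a 16-bit node index and a 16-bit modification tag into a
// single 32-bit value. The array-based freelist below stores it atomically as
// the list head; bumping the tag on every successful pop keeps the
// compare_exchange loop safe against the ABA problem.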
class
BOOST_ALIGNMENT( 4 ) // workaround for bugs in MSVC
tagged_index
{
public:
    typedef boost::uint16_t tag_t;
    typedef boost::uint16_t index_t;

    /** uninitialized constructor */
    tagged_index(void) BOOST_NOEXCEPT //: index(0), tag(0)
    {}

    /** copy constructor */
#ifdef BOOST_NO_CXX11_DEFAULTED_FUNCTIONS
    tagged_index(tagged_index const & rhs):
        index(rhs.index), tag(rhs.tag)
    {}
#else
    tagged_index(tagged_index const & rhs) = default;
#endif

    explicit tagged_index(index_t i, tag_t t = 0):
        index(i), tag(t)
    {}

    /** index access */
    index_t get_index() const    { return index; }
    void set_index(index_t i)    { index = i; }

    /** tag access */
    tag_t get_tag() const        { return tag; }
    void set_tag(tag_t t)        { tag = t; }

    tag_t get_next_tag() const
    {
        tag_t next = (get_tag() + 1u) & (std::numeric_limits<tag_t>::max)();
        return next;
    }

    bool operator==(tagged_index const & rhs) const    { return (index == rhs.index) && (tag == rhs.tag); }
    bool operator!=(tagged_index const & rhs) const    { return !operator==(rhs); }

protected:
    index_t index;
    tag_t tag;
};
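// Storage policies for fixed_size_freelist: compiletime_sized_freelist_storage
// keeps the node array in an embedded boost::array (no dynamic allocation),
// while runtime_sized_freelist_storage obtains a cache-line-aligned block from
// the allocator at construction time. Both are limited to 65535 nodes because
// tagged_index only holds 16-bit indices.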
template <typename T,
          std::size_t size>
struct compiletime_sized_freelist_storage
{
    // array-based freelists only support a 16bit address space.
    BOOST_STATIC_ASSERT(size < 65536);

    boost::array<char, size * sizeof(T) + 64> data;

    // unused ... only for API purposes
    template <typename Allocator>
    compiletime_sized_freelist_storage(Allocator const & /* alloc */, std::size_t /* count */)
    {}

    T * nodes(void) const
    {
        char * data_pointer = const_cast<char*>(data.data());
        return reinterpret_cast<T*>( boost::alignment::align_up( data_pointer, BOOST_LOCKFREE_CACHELINE_BYTES ) );
    }

    std::size_t node_count(void) const    { return size; }
};
template <typename T,
          typename Alloc = std::allocator<T> >
struct runtime_sized_freelist_storage:
    boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES >
{
    typedef boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES > allocator_type;
    T * nodes_;
    std::size_t node_count_;

    template <typename Allocator>
    runtime_sized_freelist_storage(Allocator const & alloc, std::size_t count):
        allocator_type(alloc), node_count_(count)
    {
        if (count > 65535)
            boost::throw_exception(std::runtime_error("boost.lockfree: freelist size is limited to a maximum of 65535 objects"));
        nodes_ = allocator_type::allocate(count);
        std::memset(nodes_, 0, sizeof(T) * count);
    }

    ~runtime_sized_freelist_storage(void)
    {
        allocator_type::deallocate(nodes_, node_count_);
    }

    T * nodes(void) const                 { return nodes_; }
    std::size_t node_count(void) const    { return node_count_; }
};
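// fixed_size_freelist manages the nodes of its NodeStorage by index rather than
// by pointer: the free list is threaded through the node array itself and the
// list head is a single atomic tagged_index, so no memory is allocated after
// construction. null_handle() (== node_count()) marks the end of the list.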
template <typename T,
          typename NodeStorage = runtime_sized_freelist_storage<T>
         >
class fixed_size_freelist:
    NodeStorage
{
    struct freelist_node
    {
        tagged_index next;
    };

    void initialize(void)
    {
        T * nodes = NodeStorage::nodes();
        for (std::size_t i = 0; i != NodeStorage::node_count(); ++i) {
            tagged_index * next_index = reinterpret_cast<tagged_index*>(nodes + i);
            next_index->set_index(null_handle());
#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(nodes + i);
#else
            deallocate<false>(static_cast<index_t>(i));
#endif
        }
    }

public:
    typedef tagged_index tagged_node_handle;
    typedef tagged_index::index_t index_t;

    template <typename Allocator>
    fixed_size_freelist (Allocator const & alloc, std::size_t count):
        NodeStorage(alloc, count),
        pool_(tagged_index(static_cast<index_t>(count), 0))
    {
        initialize();
    }

    fixed_size_freelist (void):
        pool_(tagged_index(NodeStorage::node_count(), 0))
    {
        initialize();
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;
        T * node = NodeStorage::nodes() + node_index;
        new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;
        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;
        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_index)
    {
        index_t index = tagged_index.get_index();
        T * n = NodeStorage::nodes() + index;
        (void)n; // silence msvc warning
        n->~T();
        deallocate<ThreadSafe>(index);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(static_cast<index_t>(n - NodeStorage::nodes()));
    }

    bool is_lock_free(void) const      { return pool_.is_lock_free(); }

    index_t null_handle(void) const    { return static_cast<index_t>(NodeStorage::node_count()); }

    index_t get_handle(T * pointer) const
    {
        if (pointer == NULL)
            return null_handle();
        else
            return static_cast<index_t>(pointer - NodeStorage::nodes());
    }

    index_t get_handle(tagged_node_handle const & handle) const    { return handle.get_index(); }

    T * get_pointer(tagged_node_handle const & tptr) const         { return get_pointer(tptr.get_index()); }

    T * get_pointer(index_t index) const
    {
        if (index == null_handle())
            return NULL;
        else
            return NodeStorage::nodes() + index;
    }

    T * get_pointer(T * ptr) const    { return ptr; }

protected: // allow use from subclasses
    template <bool ThreadSafe>
    index_t allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl();
        else
            return allocate_impl_unsafe();
    }

    index_t allocate_impl (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);
        for(;;) {
            index_t index = old_pool.get_index();
            if (index == null_handle())
                return index;
            T * old_node = NodeStorage::nodes() + index;
            tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);
            tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());
            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return old_pool.get_index();
        }
    }

    index_t allocate_impl_unsafe (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);
        index_t index = old_pool.get_index();
        if (index == null_handle())
            return index;
        T * old_node = NodeStorage::nodes() + index;
        tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);
        tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());
        pool_.store(new_pool, memory_order_relaxed);
        return old_pool.get_index();
    }

    template <bool ThreadSafe>
    void deallocate (index_t index)
    {
        if (ThreadSafe)
            deallocate_impl(index);
        else
            deallocate_impl_unsafe(index);
    }

    void deallocate_impl (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);
        for(;;) {
            tagged_index new_pool (index, old_pool.get_tag());
            new_pool_node->next.set_index(old_pool.get_index());
            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);
        tagged_index new_pool (index, old_pool.get_tag());
        new_pool_node->next.set_index(old_pool.get_index());
        pool_.store(new_pool);
    }

    atomic<tagged_index> pool_;
};
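// Compile-time selection helpers: select_freelist picks the storage policy and
// freelist implementation from the capacity/fixed-size parameters, and
// select_tagged_handle picks the matching handle types (tagged_ptr/T* for the
// node-based freelist, tagged_index/index_t for the array-based one).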
template <typename T,
          typename Alloc,
          bool IsCompileTimeSized,
          bool IsFixedSize,
          std::size_t Capacity
          >
struct select_freelist
{
    typedef typename mpl::if_c<IsCompileTimeSized,
                               compiletime_sized_freelist_storage<T, Capacity>,
                               runtime_sized_freelist_storage<T, Alloc>
                              >::type fixed_sized_storage_type;

    typedef typename mpl::if_c<IsCompileTimeSized || IsFixedSize,
                               fixed_size_freelist<T, fixed_sized_storage_type>,
                               freelist_stack<T, Alloc>
                              >::type type;
};

template <typename T, bool IsNodeBased>
struct select_tagged_handle
{
    typedef typename mpl::if_c<IsNodeBased,
                               tagged_ptr<T>,
                               tagged_index
                              >::type tagged_handle_type;

    typedef typename mpl::if_c<IsNodeBased,
                               T*,
                               typename tagged_index::index_t
                              >::type handle_type;
};
} /* namespace detail */
} /* namespace lockfree */
} /* namespace boost */

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

#endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */