//////////////////////////////////////////////////////////////////////////////
//
// (C) Copyright Ion Gaztanaga 2005-2013. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/container for documentation.
//
//////////////////////////////////////////////////////////////////////////////

#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
#define BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP

#ifndef BOOST_CONFIG_HPP
#  include <boost/config.hpp>
#endif

#if defined(BOOST_HAS_PRAGMA_ONCE)
#  pragma once
#endif

#include <boost/container/detail/config_begin.hpp>
#include <boost/container/detail/workaround.hpp>

// container
#include <boost/container/container_fwd.hpp>
#include <boost/container/throw_exception.hpp>
// container/detail
#include <boost/container/detail/pool_common.hpp>
#include <boost/container/detail/iterator.hpp>
#include <boost/move/detail/iterator_to_raw_pointer.hpp>
#include <boost/container/detail/math_functions.hpp>
#include <boost/container/detail/placement_new.hpp>
#include <boost/container/detail/mpl.hpp>
#include <boost/move/detail/to_raw_pointer.hpp>
#include <boost/move/detail/force_ptr.hpp>
#include <boost/container/detail/type_traits.hpp>
// intrusive
#include <boost/intrusive/pointer_traits.hpp>
#include <boost/intrusive/set.hpp>
#include <boost/intrusive/list.hpp>
#include <boost/intrusive/slist.hpp>
// other
#include <boost/assert.hpp>
#include <boost/core/no_exceptions_support.hpp>
#include <cstddef>

namespace boost {
namespace container {

namespace adaptive_pool_flag {

static const unsigned int none            = 0u;
static const unsigned int align_only      = 1u << 0u;
static const unsigned int size_ordered    = 1u << 1u;
static const unsigned int address_ordered = 1u << 2u;

} //namespace adaptive_pool_flag{
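
// The flags above select the pool's layout and bookkeeping strategy:
// align_only requests the simpler layout in which every block is a single
// aligned allocation with its header at the front, while size_ordered and
// address_ordered choose how the block container is ordered (see less_func
// and block_container_traits below).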

namespace dtl {

template<class size_type>
struct hdr_offset_holder_t
{
   hdr_offset_holder_t(size_type offset = 0)
      : hdr_offset(offset)
   {}
   size_type hdr_offset;
};

template<class SizeType, unsigned int Flags>
struct less_func;

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::none>
{
   static bool less(SizeType, SizeType, const void *, const void *)
   {  return true;  }
};

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::size_ordered>
{
   static bool less(SizeType ls, SizeType rs, const void *, const void *)
   {  return ls < rs;  }
};

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::address_ordered>
{
   static bool less(SizeType, SizeType, const void *la, const void *ra)
   {  return la < ra;  }
};

template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered>
{
   static bool less(SizeType ls, SizeType rs, const void *la, const void *ra)
   {  return (ls < rs) || ((ls == rs) && (la < ra));  }
};
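
// less_func is the ordering policy plugged into block_info_t::operator<:
// "none" imposes no real order, "size_ordered" compares free-node counts,
// "address_ordered" compares block addresses, and the combined
// specialization orders by free-node count first, breaking ties by address.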

template<class VoidPointer, class SizeType, unsigned OrderFlags>
struct block_container_traits
{
   typedef typename bi::make_set_base_hook
      < bi::void_pointer<VoidPointer>
      , bi::optimize_size<true>
      , bi::link_mode<bi::normal_link> >::type hook_t;

   template<class T>
   struct container
   {
      typedef typename bi::make_multiset
         <T, bi::base_hook<hook_t>, bi::size_type<SizeType> >::type type;
   };

   template<class Container>
   static void reinsert_was_used(Container &container, typename Container::reference v, bool)
   {
      typedef typename Container::const_iterator const_block_iterator;
      typedef typename Container::iterator block_iterator;
      typedef typename Container::value_compare value_compare;

      const block_iterator this_block(Container::s_iterator_to(v));
      const const_block_iterator cendit(container.cend());
      block_iterator next_block(this_block);

      if(++next_block != cendit && value_compare()(*next_block, v)){
         const_block_iterator next2_block(next_block);
         //Test if v should be swapped with next (optimization)
         if(++next2_block == cendit || !value_compare()(*next2_block, v)){
            v.swap_nodes(*next_block);
            BOOST_ASSERT(++next_block == this_block);
         }
         else{
            container.erase(this_block);
            container.insert(v);
         }
      }
   }

   template<class Container>
   static void insert_was_empty(Container &container, typename Container::value_type &v, bool)
   {
      container.insert(v);
   }

   template<class Container>
   static void erase_first(Container &container)
   {
      container.erase(container.cbegin());
   }

   template<class Container>
   static void erase_last(Container &container)
   {
      container.erase(--container.cend());
   }
};

template<class VoidPointer, class SizeType>
struct block_container_traits<VoidPointer, SizeType, 0u>
{
   typedef typename bi::make_list_base_hook
      < bi::void_pointer<VoidPointer>
      , bi::link_mode<bi::normal_link> >::type hook_t;

   template<class T>
   struct container
   {
      typedef typename bi::make_list
         <T, bi::base_hook<hook_t>, bi::size_type<SizeType>, bi::constant_time_size<false> >::type type;
   };

   template<class Container>
   static void reinsert_was_used(Container &container, typename Container::value_type &v, bool is_full)
   {
      if(is_full){
         container.erase(Container::s_iterator_to(v));
         container.push_back(v);
      }
   }

   template<class Container>
   static void insert_was_empty(Container &container, typename Container::value_type &v, bool is_full)
   {
      if(is_full){
         container.push_back(v);
      }
      else{
         container.push_front(v);
      }
   }

   template<class Container>
   static void erase_first(Container &container)
   {
      container.pop_front();
   }

   template<class Container>
   static void erase_last(Container &container)
   {
      container.pop_back();
   }
};
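
// The primary block_container_traits keeps blocks in an intrusive multiset
// ordered through block_info_t::operator< (i.e. through less_func), while
// this 0u specialization, used when no ordering flag is set, falls back to a
// plain intrusive list that moves fully free blocks to the back so they can
// be trimmed from there.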

/////////////////////////////
//
// adaptive_pool_types
//
/////////////////////////////
template<class MultiallocationChain, class VoidPointer, class SizeType, unsigned int Flags>
struct adaptive_pool_types
{
   typedef VoidPointer void_pointer;
   static const unsigned ordered = (Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered));
   typedef block_container_traits<VoidPointer, SizeType, ordered> block_container_traits_t;
   typedef typename block_container_traits_t::hook_t hook_t;
   typedef hdr_offset_holder_t<SizeType> hdr_offset_holder;
   static const unsigned int order_flags = Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered);
   typedef MultiallocationChain free_nodes_t;

   struct block_info_t
      : public hdr_offset_holder,
        public hook_t
   {
      //An intrusive list of the free nodes from this block
      free_nodes_t free_nodes;
      friend bool operator <(const block_info_t &l, const block_info_t &r)
      {
         return less_func<SizeType, order_flags>::
            less(l.free_nodes.size(), r.free_nodes.size(), &l , &r);
      }

      friend bool operator ==(const block_info_t &l, const block_info_t &r)
      {  return &l == &r;  }
   };
   typedef typename block_container_traits_t:: template container<block_info_t>::type block_container_t;
};
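
// block_info_t is the per-block header: it inherits hdr_offset_holder (the
// header's own hdr_offset is 0, while leading subblocks store the byte
// distance forward to this header), hooks itself into the block container,
// and keeps an intrusive chain of the block's currently free nodes.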


/////////////////////////////////////////////
//
// candidate_power_of_2_ct
//
/////////////////////////////////////////////
template< std::size_t alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent>
struct candidate_power_of_2_ct_helper
{
   static const std::size_t hdr_subblock_elements_alone = (alignment - hdr_size - payload_per_allocation)/real_node_size;
   static const std::size_t hdr_subblock_elements_first = (alignment - hdr_size - payload_per_allocation)/real_node_size;
   static const std::size_t elements_per_b_subblock_mid = (alignment - hdr_offset_size)/real_node_size;
   static const std::size_t elements_per_b_subblock_end = (alignment - hdr_offset_size - payload_per_allocation)/real_node_size;
   static const std::size_t num_b_subblock =
      hdr_subblock_elements_alone >= min_elements_per_block
         ? 0
         : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
            ? 1
            : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
           )
         ;

   static const std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;

   static const std::size_t total_nodes = (num_b_subblock == 0)
         ? hdr_subblock_elements_alone
         : ( (num_b_subblock == 1)
            ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
            : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
           )
         ;
   static const std::size_t total_data = total_nodes*real_node_size;
   static const std::size_t total_size = alignment*(num_b_subblock+1);
   static const bool overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
};
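
// Worked example with purely illustrative numbers (not taken from any real
// instantiation): alignment = 1024, real_node_size = 24,
// payload_per_allocation = 16, hdr_size = 48, hdr_offset_size = 8,
// min_elements_per_block = 64, overhead_percent = 5. Then
// hdr_subblock_elements_first = (1024-48-16)/24 = 40 and
// elements_per_b_subblock_end = (1024-8-16)/24 = 41; 40 < 64 but
// 40+41 >= 64, so num_b_subblock = 1, total_nodes = 81, total_data = 1944,
// total_size = 2048 and the overhead is (2048-1944)*100/2048 = 5, which is
// not < 5, so the candidate alignment is doubled and the search continues.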

template< std::size_t initial_alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent
        , bool Loop = true>
struct candidate_power_of_2_ct
{
   typedef candidate_power_of_2_ct_helper
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent> helper_t;

   static const std::size_t candidate_power_of_2 = initial_alignment << std::size_t(!helper_t::overhead_satisfied);

   typedef typename candidate_power_of_2_ct
      < candidate_power_of_2
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent
      , !helper_t::overhead_satisfied
      >::type type;

   static const std::size_t alignment = type::alignment;
   static const std::size_t num_subblocks = type::num_subblocks;
   static const std::size_t real_num_node = type::real_num_node;
};

template< std::size_t initial_alignment
        , std::size_t real_node_size
        , std::size_t payload_per_allocation
        , std::size_t min_elements_per_block
        , std::size_t hdr_size
        , std::size_t hdr_offset_size
        , std::size_t overhead_percent
        >
struct candidate_power_of_2_ct
   < initial_alignment
   , real_node_size
   , payload_per_allocation
   , min_elements_per_block
   , hdr_size
   , hdr_offset_size
   , overhead_percent
   , false>
{
   typedef candidate_power_of_2_ct
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent
      , false> type;

   typedef candidate_power_of_2_ct_helper
      < initial_alignment
      , real_node_size
      , payload_per_allocation
      , min_elements_per_block
      , hdr_size
      , hdr_offset_size
      , overhead_percent> helper_t;

   static const std::size_t alignment = initial_alignment;
   static const std::size_t num_subblocks = helper_t::num_b_subblock+1;
   static const std::size_t real_num_node = helper_t::total_nodes;
};
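
// The recursion in candidate_power_of_2_ct terminates through the
// Loop == false specialization above: once a candidate alignment satisfies
// the overhead bound, the next instantiation is reached with the same
// alignment and Loop == false, and alignment/num_subblocks/real_num_node are
// taken from that terminal helper.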

/////////////////////////////////////////////
//
// candidate_power_of_2_rt
//
/////////////////////////////////////////////
inline void candidate_power_of_2_rt ( std::size_t initial_alignment
                                    , std::size_t real_node_size
                                    , std::size_t payload_per_allocation
                                    , std::size_t min_elements_per_block
                                    , std::size_t hdr_size
                                    , std::size_t hdr_offset_size
                                    , std::size_t overhead_percent
                                    , std::size_t &alignment
                                    , std::size_t &num_subblocks
                                    , std::size_t &real_num_node)
{
   bool overhead_satisfied = false;
   std::size_t num_b_subblock = 0;
   std::size_t total_nodes = 0;

   while(!overhead_satisfied)
   {
      std::size_t hdr_subblock_elements_alone = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
      std::size_t hdr_subblock_elements_first = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
      std::size_t elements_per_b_subblock_mid = (initial_alignment - hdr_offset_size)/real_node_size;
      std::size_t elements_per_b_subblock_end = (initial_alignment - hdr_offset_size - payload_per_allocation)/real_node_size;

      num_b_subblock =
         hdr_subblock_elements_alone >= min_elements_per_block
            ? 0
            : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
               ? 1
               : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
              )
            ;

      std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;

      total_nodes = (num_b_subblock == 0)
            ? hdr_subblock_elements_alone
            : ( (num_b_subblock == 1)
               ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
               : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
              )
            ;
      std::size_t total_data = total_nodes*real_node_size;
      std::size_t total_size = initial_alignment*(num_b_subblock+1);
      overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
      initial_alignment = initial_alignment << std::size_t(!overhead_satisfied);
   }
   alignment = initial_alignment;
   num_subblocks = num_b_subblock+1;
   real_num_node = total_nodes;
}
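
// candidate_power_of_2_rt is the run-time twin of candidate_power_of_2_ct:
// it keeps doubling the candidate block alignment until the space lost to
// headers, payload and rounding drops below overhead_percent, then returns
// the chosen alignment, subblock count and nodes-per-block through the
// reference parameters.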

/////////////////////////////////////////////
//
// private_adaptive_node_pool_impl_common
//
/////////////////////////////////////////////
template< class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_common
{
   public:
   //!Segment manager typedef
   typedef SegmentManagerBase segment_manager_base_type;
   typedef typename SegmentManagerBase::multiallocation_chain multiallocation_chain;
   typedef typename SegmentManagerBase::size_type size_type;
   //Flags
   //align_only
   static const bool AlignOnly = (Flags & adaptive_pool_flag::align_only) != 0;
   typedef bool_<AlignOnly> IsAlignOnly;
   typedef true_ AlignOnlyTrue;
   typedef false_ AlignOnlyFalse;

   typedef typename SegmentManagerBase::void_pointer void_pointer;
   static const typename SegmentManagerBase::
      size_type PayloadPerAllocation = SegmentManagerBase::PayloadPerAllocation;

   typedef typename boost::intrusive::pointer_traits
      <void_pointer>::template rebind_pointer<segment_manager_base_type>::type segment_mngr_base_ptr_t;

   protected:
   typedef adaptive_pool_types
      <multiallocation_chain, void_pointer, size_type, Flags> adaptive_pool_types_t;
   typedef typename adaptive_pool_types_t::free_nodes_t free_nodes_t;
   typedef typename adaptive_pool_types_t::block_info_t block_info_t;
   typedef typename adaptive_pool_types_t::block_container_t block_container_t;
   typedef typename adaptive_pool_types_t::block_container_traits_t block_container_traits_t;
   typedef typename block_container_t::iterator block_iterator;
   typedef typename block_container_t::const_iterator const_block_iterator;
   typedef typename adaptive_pool_types_t::hdr_offset_holder hdr_offset_holder;
   typedef private_adaptive_node_pool_impl_common this_type;

   static const size_type MaxAlign = alignment_of<void_pointer>::value;
   static const size_type HdrSize = ((sizeof(block_info_t)-1)/MaxAlign+1)*MaxAlign;
   static const size_type HdrOffsetSize = ((sizeof(hdr_offset_holder)-1)/MaxAlign+1)*MaxAlign;

   segment_mngr_base_ptr_t mp_segment_mngr_base;   //Segment manager
   block_container_t m_block_container;            //Intrusive block list
   size_type m_totally_free_blocks;                //Free blocks

   class block_destroyer;
   friend class block_destroyer;

   class block_destroyer
   {
      public:
      block_destroyer(const this_type *impl, multiallocation_chain &chain, const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
         : mp_impl(impl), m_chain(chain), m_num_subblocks(num_subblocks), m_real_block_alignment(real_block_alignment), m_real_num_node(real_num_node)
      {}

      void operator()(typename block_container_t::pointer to_deallocate)
      {  return this->do_destroy(to_deallocate, IsAlignOnly());  }

      private:
      void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyTrue)
      {
         BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
         m_chain.push_back(to_deallocate);
      }

      void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyFalse)
      {
         BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
         BOOST_ASSERT(0 == to_deallocate->hdr_offset);
         hdr_offset_holder *hdr_off_holder =
            mp_impl->priv_first_subblock_from_block(boost::movelib::to_raw_pointer(to_deallocate), m_num_subblocks, m_real_block_alignment);
         m_chain.push_back(hdr_off_holder);
      }

      const this_type *mp_impl;
      multiallocation_chain &m_chain;
      const size_type m_num_subblocks;
      const size_type m_real_block_alignment;
      const size_type m_real_num_node;
   };
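
   // block_destroyer is the disposer used by priv_clear(): each (fully free)
   // block is converted back to the raw address originally returned by
   // allocate_aligned() (the header itself in align-only mode, the first
   // subblock otherwise) and pushed into a chain that is later released with
   // a single deallocate_many() call.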

   //This macro will activate invariant checking. Slow, but helpful for debugging the code.
   //#define BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
   void priv_invariants(const size_type real_num_node, const size_type num_subblocks, const size_type real_block_alignment) const
   {
      (void)real_num_node; (void)num_subblocks; (void)real_block_alignment;
      #ifdef BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
      //Check that the total number of totally free blocks is correct
      BOOST_ASSERT(m_block_container.size() >= m_totally_free_blocks);

      const const_block_iterator itend(m_block_container.cend());
      const const_block_iterator itbeg(m_block_container.cbegin());

      {  //Try to do checks in a single iteration
         const_block_iterator it(itbeg);
         size_type total_free_nodes = 0;
         size_type total_free_blocks = 0u;
         for(; it != itend; ++it){
            if(it != itbeg){
               //Check order invariant
               const_block_iterator prev(it);
               --prev;
               BOOST_ASSERT(!(m_block_container.key_comp()(*it, *prev)));
               (void)prev; (void)it;
            }

            //free_nodes invariant
            const size_type free_nodes = it->free_nodes.size();
            BOOST_ASSERT(free_nodes <= real_num_node);
            BOOST_ASSERT(free_nodes != 0);

            //Accumulate total_free_nodes and total_free_blocks
            total_free_nodes += free_nodes;
            total_free_blocks += it->free_nodes.size() == real_num_node;

            if (!AlignOnly) {
               //Check that header offsets are correct
               hdr_offset_holder *hdr_off_holder = this->priv_first_subblock_from_block(const_cast<block_info_t *>(&*it), num_subblocks, real_block_alignment);
               for (size_type i = 0, max = num_subblocks; i < max; ++i) {
                  const size_type offset = size_type(reinterpret_cast<char*>(const_cast<block_info_t *>(&*it)) - reinterpret_cast<char*>(hdr_off_holder));
                  (void)offset;
                  BOOST_ASSERT(hdr_off_holder->hdr_offset == offset);
                  BOOST_ASSERT(0 == (reinterpret_cast<std::size_t>(hdr_off_holder) & (real_block_alignment - 1)));
                  BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
                  hdr_off_holder = move_detail::force_ptr<hdr_offset_holder *>(reinterpret_cast<char*>(hdr_off_holder) + real_block_alignment);
               }
            }
         }
         BOOST_ASSERT(total_free_blocks == m_totally_free_blocks);
         BOOST_ASSERT(total_free_nodes >= m_totally_free_blocks*real_num_node);
      }
      #endif
   }

   void priv_deallocate_free_blocks( const size_type max_free_blocks, const size_type real_num_node
                                   , const size_type num_subblocks, const size_type real_block_alignment)
   {  //Trampoline function to ease inlining
      if(m_totally_free_blocks > max_free_blocks){
         this->priv_deallocate_free_blocks_impl(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      }
   }

   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment) const
   {  return this->priv_first_subblock_from_block(block, num_subblocks, real_block_alignment, IsAlignOnly());  }

   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyFalse) const
   {
      hdr_offset_holder *const hdr_off_holder = move_detail::force_ptr<hdr_offset_holder*>
         (reinterpret_cast<char*>(block) - (num_subblocks-1)*real_block_alignment);
      BOOST_ASSERT(hdr_off_holder->hdr_offset == size_type(reinterpret_cast<char*>(block) - reinterpret_cast<char*>(hdr_off_holder)));
      BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
      BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
      return hdr_off_holder;
   }

   hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyTrue) const
   {
      (void)num_subblocks; (void)real_block_alignment;
      return move_detail::force_ptr<hdr_offset_holder*>(block);
   }

   void priv_deallocate_free_blocks_impl( const size_type max_free_blocks, const size_type real_num_node
                                        , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //Now check if we've reached the free nodes limit
      //and check if we have free blocks. If so, deallocate as much
      //as we can to stay below the limit
      multiallocation_chain chain;
      {
         if(Flags & adaptive_pool_flag::size_ordered){
            const_block_iterator it = m_block_container.cend();
            --it;
            size_type totally_free_blocks = m_totally_free_blocks;

            for( ; totally_free_blocks > max_free_blocks; --totally_free_blocks){
               BOOST_ASSERT(it->free_nodes.size() == real_num_node);
               void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
               --it;
               block_container_traits_t::erase_last(m_block_container);
               chain.push_front(void_pointer(addr));
            }
         }
         else{
            const_block_iterator it = m_block_container.cend();
            size_type totally_free_blocks = m_totally_free_blocks;

            while(totally_free_blocks > max_free_blocks){
               --it;
               if(it->free_nodes.size() == real_num_node){
                  void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
                  it = m_block_container.erase(it);
                  chain.push_front(void_pointer(addr));
                  --totally_free_blocks;
               }
            }
         }
         BOOST_ASSERT((m_totally_free_blocks - max_free_blocks) == chain.size());
         m_totally_free_blocks = max_free_blocks;
      }
      this->mp_segment_mngr_base->deallocate_many(chain);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
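
   // Note: when the container is size-ordered, totally free blocks (those
   // whose free list holds real_num_node nodes) sort to the back, so the
   // branch above simply pops from the end; otherwise the code walks
   // backwards erasing only completely free blocks until at most
   // max_free_blocks of them remain.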

   void priv_fill_chain_remaining_to_block
      ( multiallocation_chain &chain, size_type target_elem_in_chain, block_info_t &c_info
      , char *mem_address, size_type max_node_in_mem
      , const size_type real_node_size)
   {
      BOOST_ASSERT(chain.size() <= target_elem_in_chain);

      //First add all possible nodes to the chain
      const size_type left = target_elem_in_chain - chain.size();
      const size_type add_to_chain = (max_node_in_mem < left) ? max_node_in_mem : left;
      char *free_mem_address = static_cast<char *>(boost::movelib::to_raw_pointer
         (chain.incorporate_after(chain.last(), void_pointer(mem_address), real_node_size, add_to_chain)));
      //Now store remaining nodes in the free list
      if(const size_type free = max_node_in_mem - add_to_chain){
         free_nodes_t & free_nodes = c_info.free_nodes;
         free_nodes.incorporate_after(free_nodes.last(), void_pointer(free_mem_address), real_node_size, free);
      }
   }
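
   // In other words, the raw memory at mem_address is carved into
   // max_node_in_mem nodes of real_node_size bytes: as many as are still
   // needed to reach target_elem_in_chain go into the caller's chain, and
   // the leftovers are linked into the block's free-node list.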

   //!Allocates several blocks of nodes. Can throw
   void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                   , const size_type max_free_blocks
                                   , const size_type real_block_alignment, const size_type real_node_size
                                   , const size_type real_num_node, const size_type num_subblocks
                                   , AlignOnlyTrue)
   {
      (void)num_subblocks;
      BOOST_ASSERT(m_block_container.empty());
      BOOST_ASSERT(min_elements > 0);
      const size_type n = (min_elements - 1)/real_num_node + 1;
      const size_type real_block_size = real_block_alignment - PayloadPerAllocation;
      const size_type target_elem_in_chain = chain.size() + min_elements;
      for(size_type i = 0; i != n; ++i){
         //We allocate a new NodeBlock and put it as the last
         //element of the tree
         char *mem_address = static_cast<char*>
            (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
         if(!mem_address){
            //In case of error, free memory deallocating all nodes (the new ones allocated
            //in this function plus previously stored nodes in chain).
            this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
            throw_bad_alloc();
         }
         block_info_t &c_info = *new(mem_address, boost_container_new_t())block_info_t();
         mem_address += HdrSize;
         this->priv_fill_chain_remaining_to_block(chain, target_elem_in_chain, c_info, mem_address, real_num_node, real_node_size);
         const size_type free_nodes = c_info.free_nodes.size();
         if(free_nodes){
            const bool is_full = free_nodes == real_num_node;
            BOOST_ASSERT(free_nodes < real_num_node);
            m_totally_free_blocks += static_cast<size_type>(is_full);
            block_container_traits_t::insert_was_empty(m_block_container, c_info, is_full);
         }
      }
   }

   void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                   , const size_type max_free_blocks
                                   , const size_type real_block_alignment, const size_type real_node_size
                                   , const size_type real_num_node, const size_type num_subblocks
                                   , AlignOnlyFalse)
   {
      BOOST_ASSERT(m_block_container.empty());
      BOOST_ASSERT(min_elements > 0);
      const size_type n = (min_elements - 1)/real_num_node + 1;
      const size_type real_block_size = real_block_alignment*num_subblocks - PayloadPerAllocation;
      const size_type elements_per_subblock_mid = (real_block_alignment - HdrOffsetSize)/real_node_size;
      const size_type elements_per_subblock_end = (real_block_alignment - HdrOffsetSize - PayloadPerAllocation) / real_node_size;
      const size_type hdr_subblock_elements = (real_block_alignment - HdrSize - PayloadPerAllocation)/real_node_size;
      const size_type target_elem_in_chain = chain.size() + min_elements;

      for(size_type i = 0; i != n; ++i){
         //We allocate a new NodeBlock and put it as the last
         //element of the tree
         char *mem_address = static_cast<char*>
            (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
         if(!mem_address){
            //In case of error, free memory deallocating all nodes (the new ones allocated
            //in this function plus previously stored nodes in chain).
            this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
            throw_bad_alloc();
         }
         //First initialize header information on the last subblock
         char *hdr_addr = mem_address + real_block_alignment*(num_subblocks-1);
         block_info_t &c_info = *new(hdr_addr, boost_container_new_t())block_info_t();
         //Some structural checks
         BOOST_ASSERT(static_cast<void*>(&static_cast<hdr_offset_holder&>(c_info).hdr_offset) ==
                      static_cast<void*>(&c_info)); (void)c_info;
         for( size_type subblock = 0, maxsubblock = num_subblocks - 1
            ; subblock < maxsubblock
            ; ++subblock, mem_address += real_block_alignment){
            //Initialize header offset mark
            new(mem_address, boost_container_new_t()) hdr_offset_holder(size_type(hdr_addr - mem_address));
            const size_type elements_per_subblock = (subblock != (maxsubblock - 1)) ? elements_per_subblock_mid : elements_per_subblock_end;
            this->priv_fill_chain_remaining_to_block
               (chain, target_elem_in_chain, c_info, mem_address + HdrOffsetSize, elements_per_subblock, real_node_size);
         }
         this->priv_fill_chain_remaining_to_block
            (chain, target_elem_in_chain, c_info, hdr_addr + HdrSize, hdr_subblock_elements, real_node_size);
         m_totally_free_blocks += static_cast<size_type>(c_info.free_nodes.size() == real_num_node);
         if (c_info.free_nodes.size())
            m_block_container.push_front(c_info);
      }
   }
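
   // Memory layout produced by the overload above (non align-only case):
   // num_subblocks-1 leading subblocks, each starting with an
   // hdr_offset_holder that stores the byte distance forward to hdr_addr,
   // followed by nodes; the last subblock starts with the block_info_t
   // header itself (hdr_offset == 0) followed by more nodes. This layout is
   // what lets priv_block_from_node() recover the header from any node.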

   //!Allocates a single node. Can throw
   void *priv_allocate_node( const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                           , const size_type real_num_node, const size_type num_subblocks)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //If there are no free nodes we allocate a new block
      if(!m_block_container.empty()){
         //We take the first free node; the multiset can't be empty here
         free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
         BOOST_ASSERT(!free_nodes.empty());
         const size_type free_nodes_count = free_nodes.size();
         void *first_node = boost::movelib::to_raw_pointer(free_nodes.pop_front());
         if(free_nodes.empty()){
            block_container_traits_t::erase_first(m_block_container);
         }
         m_totally_free_blocks -= static_cast<size_type>(free_nodes_count == real_num_node);
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         return first_node;
      }
      else{
         multiallocation_chain chain;
         this->priv_append_from_new_blocks
            (1, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
         void *node = boost::movelib::to_raw_pointer(chain.pop_front());
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         return node;
      }
   }

   void priv_allocate_nodes( const size_type n, multiallocation_chain &chain
                           , const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                           , const size_type real_num_node, const size_type num_subblocks)
   {
      size_type i = 0;
      BOOST_TRY{
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         while(i != n){
            //If there are no free nodes we allocate all needed blocks
            if (m_block_container.empty()){
               this->priv_append_from_new_blocks
                  (n - i, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
               BOOST_ASSERT(m_block_container.empty() || (++m_block_container.cbegin() == m_block_container.cend()));
               BOOST_ASSERT(chain.size() == n);
               break;
            }
            free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
            const size_type free_nodes_count_before = free_nodes.size();
            m_totally_free_blocks -= static_cast<size_type>(free_nodes_count_before == real_num_node);
            const size_type num_left = n-i;
            const size_type num_elems = (num_left < free_nodes_count_before) ? num_left : free_nodes_count_before;
            typedef typename free_nodes_t::iterator free_nodes_iterator;

            if(num_left < free_nodes_count_before){
               const free_nodes_iterator it_bbeg(free_nodes.before_begin());
               free_nodes_iterator it_bend(it_bbeg);
               for(size_type j = 0; j != num_elems; ++j){
                  ++it_bend;
               }
               free_nodes_iterator it_end = it_bend; ++it_end;
               free_nodes_iterator it_beg = it_bbeg; ++it_beg;
               free_nodes.erase_after(it_bbeg, it_end, num_elems);
               chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
               //chain.splice_after(chain.last(), free_nodes, it_bbeg, it_bend, num_elems);
               BOOST_ASSERT(!free_nodes.empty());
            }
            else{
               const free_nodes_iterator it_beg(free_nodes.begin()), it_bend(free_nodes.last());
               free_nodes.clear();
               chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
               block_container_traits_t::erase_first(m_block_container);
            }
            i += num_elems;
         }
      }
      BOOST_CATCH(...){
         this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
         this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
         BOOST_RETHROW
      }
      BOOST_CATCH_END
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }

   //!Deallocates the node pointed to by pElem. Never throws
   void priv_deallocate_node( void *pElem
                            , const size_type max_free_blocks, const size_type real_num_node
                            , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
      const size_type prev_free_nodes = block_info.free_nodes.size();
      BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);

      //We put the node back into the block's free node list
      block_info.free_nodes.push_back(void_pointer(pElem));

      //Reinsert the block in the container and trim excess free blocks
      this->priv_reinsert_block(block_info, prev_free_nodes == 0, real_num_node);
      this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }

   void priv_deallocate_nodes( multiallocation_chain &nodes
                             , const size_type max_free_blocks, const size_type real_num_node
                             , const size_type num_subblocks, const size_type real_block_alignment)
   {
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      //To take advantage of node locality, wait until two
      //nodes belong to different blocks. Only then reinsert
      //the block of the first node in the block tree.
      //Cache of the previous block
      block_info_t *prev_block_info = 0;

      //If the block was empty before this call, it's not yet
      //inserted in the block tree.
      bool prev_block_was_empty = false;
      typedef typename free_nodes_t::iterator free_nodes_iterator;
      {
         const free_nodes_iterator itbb(nodes.before_begin()), ite(nodes.end());
         free_nodes_iterator itf(nodes.begin()), itbf(itbb);
         size_type splice_node_count = size_type(-1);
         while(itf != ite){
            void *pElem = boost::movelib::to_raw_pointer(boost::movelib::iterator_to_raw_pointer(itf));
            block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
            BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);
            ++splice_node_count;

            //If block change is detected calculate the cached block position in the tree
            if(&block_info != prev_block_info){
               if(prev_block_info){ //Make sure we skip the initial "dummy" cache
                  free_nodes_iterator it(itbb); ++it;
                  nodes.erase_after(itbb, itf, splice_node_count);
                  prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*it, &*itbf, splice_node_count);
                  this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
                  splice_node_count = 0;
               }
               //Update cache with new data
               prev_block_was_empty = block_info.free_nodes.empty();
               prev_block_info = &block_info;
            }
            itbf = itf;
            ++itf;
         }
      }
      if(prev_block_info){
         //The loop above reinserted all blocks except the last one; do it now
         const free_nodes_iterator itfirst(nodes.begin()), itlast(nodes.last());
         const size_type splice_node_count = nodes.size();
         nodes.clear();
         prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*itfirst, &*itlast, splice_node_count);
         this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
         this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      }
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }
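
   // The deallocation loop above exploits locality: consecutive nodes of the
   // incoming chain that map to the same block are spliced back into that
   // block's free list with a single incorporate_after() call, and the block
   // is reinserted in the container (possibly becoming totally free) only
   // when a node from a different block is found.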

   void priv_reinsert_block(block_info_t &prev_block_info, const bool prev_block_was_empty, const size_type real_num_node)
   {
      //Cache the free nodes from the block
      const size_type this_block_free_nodes = prev_block_info.free_nodes.size();
      const bool is_full = this_block_free_nodes == real_num_node;

      //Update free block count
      m_totally_free_blocks += static_cast<size_type>(is_full);
      if(prev_block_was_empty){
         block_container_traits_t::insert_was_empty(m_block_container, prev_block_info, is_full);
      }
      else{
         block_container_traits_t::reinsert_was_used(m_block_container, prev_block_info, is_full);
      }
   }

   block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyFalse) const
   {
      hdr_offset_holder *hdr_off_holder =
         reinterpret_cast<hdr_offset_holder*>((std::size_t)node & size_type(~(real_block_alignment - 1)));
      BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
      BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
      block_info_t *block = move_detail::force_ptr<block_info_t *>
         (reinterpret_cast<char*>(hdr_off_holder) + hdr_off_holder->hdr_offset);
      BOOST_ASSERT(block->hdr_offset == 0);
      return block;
   }

   block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyTrue) const
   {
      return (block_info_t *)((std::size_t)node & std::size_t(~(real_block_alignment - 1)));
   }

   block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment) const
   {  return this->priv_block_from_node(node, real_block_alignment, IsAlignOnly());  }
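
   // priv_block_from_node relies on every subblock being
   // real_block_alignment-aligned: masking the node address with
   // ~(real_block_alignment - 1) lands on the enclosing subblock's
   // hdr_offset_holder, and adding its stored hdr_offset (0 when the node
   // already lives in the header subblock) yields the block_info_t header.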

   //!Deallocates all used memory. Never throws
   void priv_clear(const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
   {
      #ifndef NDEBUG
      block_iterator it = m_block_container.begin();
      block_iterator itend = m_block_container.end();
      size_type n_free_nodes = 0;
      for(; it != itend; ++it){
         //Check for memory leak
         BOOST_ASSERT(it->free_nodes.size() == real_num_node);
         ++n_free_nodes;
      }
      BOOST_ASSERT(n_free_nodes == m_totally_free_blocks);
      #endif
      //Check for memory leaks
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      multiallocation_chain chain;
      m_block_container.clear_and_dispose(block_destroyer(this, chain, num_subblocks, real_block_alignment, real_num_node));
      this->mp_segment_mngr_base->deallocate_many(chain);
      m_totally_free_blocks = 0;
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   }

   public:
   private_adaptive_node_pool_impl_common(segment_manager_base_type *segment_mngr_base)
      //General purpose allocator
      : mp_segment_mngr_base(segment_mngr_base)
      , m_block_container()
      , m_totally_free_blocks(0)
   {}

   size_type num_free_nodes()
   {
      typedef typename block_container_t::const_iterator citerator;
      size_type count = 0;
      citerator it (m_block_container.begin()), itend(m_block_container.end());
      for(; it != itend; ++it){
         count += it->free_nodes.size();
      }
      return count;
   }

   void swap(private_adaptive_node_pool_impl_common &other)
   {
      std::swap(mp_segment_mngr_base, other.mp_segment_mngr_base);
      std::swap(m_totally_free_blocks, other.m_totally_free_blocks);
      m_block_container.swap(other.m_block_container);
   }

   //!Returns the segment manager. Never throws
   segment_manager_base_type* get_segment_manager_base()const
   {  return boost::movelib::to_raw_pointer(mp_segment_mngr_base);  }
};

template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent
        , bool AlignOnly>
struct calculate_alignment_ct
{
   static const std::size_t alignment = upper_power_of_2_ct<SizeType, HdrSize + RealNodeSize*NodesPerBlock>::value;
   static const std::size_t num_subblocks = 0;
   static const std::size_t real_num_node = (alignment - PayloadPerAllocation - HdrSize)/RealNodeSize;
};

template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent>
struct calculate_alignment_ct
   < SizeType
   , HdrSize
   , PayloadPerAllocation
   , RealNodeSize
   , NodesPerBlock
   , HdrOffsetSize
   , OverheadPercent
   , false>
{
   typedef typename candidate_power_of_2_ct
      < upper_power_of_2_ct<SizeType, HdrSize + PayloadPerAllocation + RealNodeSize>::value
      , RealNodeSize
      , PayloadPerAllocation
      , NodesPerBlock
      , HdrSize
      , HdrOffsetSize
      , OverheadPercent
      >::type type;

   static const std::size_t alignment = type::alignment;
   static const std::size_t num_subblocks = type::num_subblocks;
   static const std::size_t real_num_node = type::real_num_node;
};


/////////////////////////////////////////////
//
// private_adaptive_node_pool_impl_ct
//
/////////////////////////////////////////////
template< class SegmentManagerBase
        , std::size_t MaxFreeBlocks
        , std::size_t NodeSize
        , std::size_t NodesPerBlock
        , std::size_t OverheadPercent
        , unsigned int Flags>
class private_adaptive_node_pool_impl_ct
   : public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
{
   typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> base_t;

   //Non-copyable
   private_adaptive_node_pool_impl_ct();
   private_adaptive_node_pool_impl_ct(const private_adaptive_node_pool_impl_ct &);
   private_adaptive_node_pool_impl_ct &operator=(const private_adaptive_node_pool_impl_ct &);

   public:
   typedef typename base_t::void_pointer void_pointer;
   typedef typename base_t::size_type size_type;
   typedef typename base_t::multiallocation_chain multiallocation_chain;
   typedef typename base_t::segment_manager_base_type segment_manager_base_type;

   static const typename base_t::size_type PayloadPerAllocation = base_t::PayloadPerAllocation;

   //align_only
   static const bool AlignOnly = base_t::AlignOnly;

   private:
   static const size_type MaxAlign = base_t::MaxAlign;
   static const size_type HdrSize = base_t::HdrSize;
   static const size_type HdrOffsetSize = base_t::HdrOffsetSize;

   static const size_type RealNodeSize = lcm_ct<NodeSize, alignment_of<void_pointer>::value>::value;

   typedef calculate_alignment_ct
      < size_type, HdrSize, PayloadPerAllocation
      , RealNodeSize, NodesPerBlock, HdrOffsetSize, OverheadPercent, AlignOnly> data_t;

   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   static const size_type NumSubBlocks = data_t::num_subblocks;
   static const size_type RealNumNode = data_t::real_num_node;
   static const size_type RealBlockAlignment = data_t::alignment;

   public:

   //!Constructor from a segment manager. Never throws
   private_adaptive_node_pool_impl_ct(typename base_t::segment_manager_base_type *segment_mngr_base)
      //General purpose allocator
      : base_t(segment_mngr_base)
   {}

   //!Destructor. Deallocates all allocated blocks. Never throws
   ~private_adaptive_node_pool_impl_ct()
   {  this->priv_clear(NumSubBlocks, data_t::alignment, RealNumNode);  }

   size_type get_real_num_node() const
   {  return RealNumNode;  }

   //!Allocates a single node. Can throw
   void *allocate_node()
   {
      return this->priv_allocate_node
         (MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
   }

   //!Allocates n nodes.
   //!Can throw
   void allocate_nodes(const size_type n, multiallocation_chain &chain)
   {
      this->priv_allocate_nodes
         (n, chain, MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
   }

   //!Deallocates the node pointed to by pElem. Never throws
   void deallocate_node(void *pElem)
   {
      this->priv_deallocate_node(pElem, MaxFreeBlocks, RealNumNode, NumSubBlocks, RealBlockAlignment);
   }

   //!Deallocates a linked list of nodes. Never throws
   void deallocate_nodes(multiallocation_chain &nodes)
   {
      this->priv_deallocate_nodes(nodes, MaxFreeBlocks, RealNumNode, NumSubBlocks, data_t::alignment);
   }

   void deallocate_free_blocks()
   {  this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment);  }

   //Deprecated, use deallocate_free_blocks
   void deallocate_free_chunks()
   {  this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment);  }
};

/////////////////////////////////////////////
//
// private_adaptive_node_pool_impl_rt
//
/////////////////////////////////////////////
template<class SizeType>
struct private_adaptive_node_pool_impl_rt_data
{
   typedef SizeType size_type;

   private_adaptive_node_pool_impl_rt_data(size_type max_free_blocks, size_type real_node_size)
      : m_max_free_blocks(max_free_blocks), m_real_node_size(real_node_size)
      , m_real_block_alignment(), m_num_subblocks(), m_real_num_node()
   {}

   const size_type m_max_free_blocks;
   const size_type m_real_node_size;
   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   size_type m_real_block_alignment;
   size_type m_num_subblocks;
   //This is the real number of nodes per block
   size_type m_real_num_node;
};


template<class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_rt
   : private private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type>
   , public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
{
   typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> impl_t;
   typedef private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type> data_t;

   //Non-copyable
   private_adaptive_node_pool_impl_rt();
   private_adaptive_node_pool_impl_rt(const private_adaptive_node_pool_impl_rt &);
   private_adaptive_node_pool_impl_rt &operator=(const private_adaptive_node_pool_impl_rt &);

   protected:

   typedef typename impl_t::void_pointer void_pointer;
   typedef typename impl_t::size_type size_type;
   typedef typename impl_t::multiallocation_chain multiallocation_chain;

   static const typename impl_t::size_type PayloadPerAllocation = impl_t::PayloadPerAllocation;

   //Flags
   //align_only
   static const bool AlignOnly = impl_t::AlignOnly;

   static const size_type HdrSize = impl_t::HdrSize;
   static const size_type HdrOffsetSize = impl_t::HdrOffsetSize;

   public:

   //!Segment manager typedef
   typedef SegmentManagerBase segment_manager_base_type;

   //!Constructor from a segment manager. Never throws
   private_adaptive_node_pool_impl_rt
      ( segment_manager_base_type *segment_mngr_base
      , size_type node_size
      , size_type nodes_per_block
      , size_type max_free_blocks
      , unsigned char overhead_percent
      )
      : data_t(max_free_blocks, lcm(node_size, size_type(alignment_of<void_pointer>::value)))
      , impl_t(segment_mngr_base)
   {
      if(AlignOnly){
         this->m_real_block_alignment = upper_power_of_2(HdrSize + this->m_real_node_size*nodes_per_block);
         this->m_real_num_node = (this->m_real_block_alignment - PayloadPerAllocation - HdrSize)/this->m_real_node_size;
      }
      else{
         candidate_power_of_2_rt ( upper_power_of_2(HdrSize + PayloadPerAllocation + this->m_real_node_size)
                                 , this->m_real_node_size
                                 , PayloadPerAllocation
                                 , nodes_per_block
                                 , HdrSize
                                 , HdrOffsetSize
                                 , overhead_percent
                                 , this->m_real_block_alignment
                                 , this->m_num_subblocks
                                 , this->m_real_num_node);
      }
   }

   //!Destructor. Deallocates all allocated blocks. Never throws
   ~private_adaptive_node_pool_impl_rt()
   {  this->priv_clear(this->m_num_subblocks, this->m_real_block_alignment, this->m_real_num_node);  }

   size_type get_real_num_node() const
   {  return this->m_real_num_node;  }

   //!Allocates a single node. Can throw
   void *allocate_node()
   {
      return this->priv_allocate_node
         (this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Allocates n nodes.
   //!Can throw
   void allocate_nodes(const size_type n, multiallocation_chain &chain)
   {
      this->priv_allocate_nodes
         (n, chain, this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Deallocates the node pointed to by pElem. Never throws
   void deallocate_node(void *pElem)
   {
      this->priv_deallocate_node(pElem, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Deallocates a linked list of nodes. Never throws
   void deallocate_nodes(multiallocation_chain &nodes)
   {
      this->priv_deallocate_nodes(nodes, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   void deallocate_free_blocks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }

   //Deprecated, use deallocate_free_blocks
   void deallocate_free_chunks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }
};

} //namespace dtl {
} //namespace container {
} //namespace boost {

#include <boost/container/detail/config_end.hpp>

#endif //#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP