/* Copyright (C) 2011 Kwan Ting Chan
 *
 * Use, modification and distribution is subject to the
 * Boost Software License, Version 1.0. (See accompanying
 * file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
 */

#include "test_simple_seg_storage.hpp"
#include "track_allocator.hpp"
#include "random_shuffle.hpp"

#include <boost/pool/simple_segregated_storage.hpp>
#include <boost/assert.hpp>
#include <boost/integer/common_factor_ct.hpp>
#if defined(BOOST_MSVC) && (BOOST_MSVC == 1400)
#pragma warning(push)
#pragma warning(disable:4244)
#endif
#include <boost/random/mersenne_twister.hpp>
#include <boost/random/uniform_int.hpp>
#include <boost/random/variate_generator.hpp>
#if defined(BOOST_MSVC) && (BOOST_MSVC == 1400)
#pragma warning(pop)
#endif

#include <boost/detail/lightweight_test.hpp>

#include <algorithm>
#include <functional>
#include <set>
#include <vector>

#include <cstddef>
#include <cstdlib>
#include <ctime>

#ifdef BOOST_MSVC
#pragma warning(disable:4267)
#endif

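// Exercises the simple_segregated_storage free-list interface through the
// test_simp_seg_store test wrapper: segregate(), add_block() /
// add_ordered_block(), malloc() / free() / ordered_free() and malloc_n().
// The quoted comments restate guarantees from the simple_segregated_storage
// documentation.
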
41 // "A free list is ordered if repeated calls to malloc() will result in a
42 // constantly-increasing sequence of values, as determined by std::less<void*>"
43 // Return: true if in constantly-increasing order, false otherwise
44 bool check_is_order(const std::vector<void*>& vs)
45 {
46 if(vs.size() < 2) { return true; }
47
48 void *lower, *higher;
49 std::vector<void*>::const_iterator ci = vs.begin();
50 lower = *(ci++);
51 while(ci != vs.end())
52 {
53 higher = *(ci++);
54 if(!std::less<void*>()(lower, higher)) { return false; }
55 }
56
57 return true;
58 }
59
// Return: number of chunks malloc'd from store
std::size_t test_is_order(test_simp_seg_store& store)
{
    std::vector<void*> vpv;
    std::size_t nchunk = 0;
    // Pre: !empty()
    while(!store.empty())
    {
        void* const first = store.get_first();
        void* const pv = store.malloc();
        // "Takes the first available chunk from the free list
        // and returns it"
        BOOST_TEST(first == pv);

        vpv.push_back(pv);
        ++nchunk;
    }
    BOOST_TEST(check_is_order(vpv));

    return nchunk;
}

boost::mt19937 gen;

int main()
{
    std::srand(static_cast<unsigned>(std::time(0)));
    gen.seed(static_cast<boost::uint32_t>(std::time(0)));

    /* Store::segregate(block, sz, partition_sz, end) */
    std::size_t partition_sz
        = boost::integer::static_lcm<sizeof(void*), sizeof(int)>::value;
    boost::uniform_int<> dist(partition_sz, 10000);
    boost::variate_generator<boost::mt19937&,
        boost::uniform_int<> > die(gen, dist);
    std::size_t block_size = die();
    // Pre: npartition_sz >= sizeof(void*)
    //      npartition_sz = sizeof(void*) * i, for some integer i
    //      nsz >= npartition_sz
    //      block is properly aligned for an array of objects of
    //      size npartition_sz and an array of void *
    BOOST_ASSERT(partition_sz >= sizeof(void*));
    BOOST_ASSERT(partition_sz % sizeof(void*) == 0);
    BOOST_ASSERT(block_size >= partition_sz);
    {
        char* const pc = track_allocator::malloc(block_size);
        // (Test) Pre: block of memory is valid
        BOOST_ASSERT(pc);
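        // The address of endadd below is passed as the "end" argument to
        // segregate(); the traversal further down stops once it reaches it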
        int endadd = 0;
        void* const pvret = test_simp_seg_store::segregate(pc, block_size,
            partition_sz, &endadd);

        // The first chunk "is always equal to block"
        BOOST_TEST(pvret == pc);

        void* cur = test_simp_seg_store::get_nextof(static_cast<int*>(pvret));
        void* last = pvret;
        std::size_t nchunk = 1;
        while(cur != &endadd)
        {
            ++nchunk;

            // Memory of each chunk does not overlap
            // The free list constructed is actually from the given block
            // The "interleaved free list is ordered"
            BOOST_TEST(std::less_equal<void*>()(static_cast<char*>(last)
                + partition_sz, cur));
            BOOST_TEST(std::less_equal<void*>()(static_cast<char*>(cur)
                + partition_sz, pc + block_size));

            last = cur;
            cur = test_simp_seg_store::get_nextof(static_cast<int*>(cur));
        }
        // "The last chunk is set to point to end"
        // "Partitioning into as many partition_sz-sized chunks as possible"
        BOOST_TEST(nchunk == block_size/partition_sz);
    }

    /* t.add_block(block, sz, partition_sz), t.malloc() */
    {
        // The default constructor of simple_segregated_storage does nothing
        test_simp_seg_store tstore;
        // Post: empty()
        BOOST_TEST(tstore.empty());

        char* const pc = track_allocator::malloc(block_size);
        tstore.add_block(pc, block_size, partition_sz);

        // The first chunk "is always equal to block"
        BOOST_TEST(tstore.get_first() == pc);

        // Empty before add_block() => "is ordered after"
        std::size_t nchunk = test_is_order(tstore);
        // "Partitioning into as many partition_sz-sized chunks as possible"
        BOOST_TEST(nchunk == block_size/partition_sz);

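        // The hard-coded 24- and 23-byte blocks below must each be able to
        // hold at least one partition_sz-sized chunk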
        BOOST_ASSERT(partition_sz <= 23);
        test_simp_seg_store tstore2;
        char* const pc2 = track_allocator::malloc(75);
        tstore2.add_block(pc2, 24, partition_sz);
        tstore2.add_block(pc2 + 49, 24, partition_sz);
        tstore2.add_block(pc2 + 25, 24, partition_sz);
        tstore2.add_block(track_allocator::malloc(23), 23, partition_sz);
        std::size_t nchunk_ref = (3*(24/partition_sz)) + (23/partition_sz);
        for(nchunk = 0; !tstore2.empty(); tstore2.malloc(), ++nchunk) {}
        // add_block() merges the new free list into the existing one
        BOOST_TEST(nchunk == nchunk_ref);
    }

    /* t.free(chunk) */
    {
        test_simp_seg_store tstore;
        char* const pc = track_allocator::malloc(partition_sz);
        tstore.add_block(pc, partition_sz, partition_sz);
        void* pv = tstore.malloc();
        BOOST_TEST(tstore.empty());
        tstore.free(pv);
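        // free() inserts the chunk back into the free list, so the store is
        // expected to be non-empty again
        BOOST_TEST(!tstore.empty());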
    }

    /* t.add_ordered_block(block, sz, partition_sz) */
    {
        {
            char* const pc = track_allocator::malloc(6 * partition_sz);
            std::vector<void*> vpv;
            vpv.push_back(pc);
            vpv.push_back(pc + (2 * partition_sz));
            vpv.push_back(pc + (4 * partition_sz));

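            // vpv starts in ascending address order, so next_permutation()
            // visits all 3! = 6 insertion orders exactly once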
            do
            {
                test_simp_seg_store tstore;
                tstore.add_ordered_block(vpv[0], 2*partition_sz, partition_sz);
                tstore.add_ordered_block(vpv[1], 2*partition_sz, partition_sz);
                tstore.add_ordered_block(vpv[2], 2*partition_sz, partition_sz);
                // "Order-preserving"
                test_is_order(tstore);
            } while(std::next_permutation(vpv.begin(), vpv.end()));
        }

        {
            test_simp_seg_store tstore;
            char* const pc = track_allocator::malloc(6 * partition_sz);
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (4 * partition_sz),
                (2 * partition_sz), partition_sz);
            // "Order-preserving"
            test_is_order(tstore);
        }

        {
            test_simp_seg_store tstore;
            char* const pc = track_allocator::malloc(6 * partition_sz);
            tstore.add_ordered_block(pc + (4 * partition_sz),
                (2 * partition_sz), partition_sz);
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            // "Order-preserving"
            test_is_order(tstore);
        }
    }

    /* t.ordered_free(chunk) */
    {
        char* const pc = track_allocator::malloc(6 * partition_sz);

        test_simp_seg_store tstore;
        tstore.add_block(pc, 6 * partition_sz, partition_sz);

        std::vector<void*> vpv;
        for(std::size_t i=0; i < 6; ++i) { vpv.push_back(tstore.malloc()); }
        BOOST_ASSERT(tstore.empty());
        pool_test_random_shuffle(vpv.begin(), vpv.end());

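        // The chunks are freed in random order so that ordered_free() has to
        // insert each one at its correct position in the free list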
        for(std::size_t i=0; i < 6; ++i)
        {
            tstore.ordered_free(vpv[i]);
        }
        // "Order-preserving"
        test_is_order(tstore);
    }

    /* t.malloc_n(n, partition_sz) */
    {
        {
            char* const pc = track_allocator::malloc(12 * partition_sz);
            test_simp_seg_store tstore;
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (3 * partition_sz),
                3 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (7 * partition_sz),
                5 * partition_sz, partition_sz);

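            // The free list holds runs of 2, 3 and 5 contiguous chunks, so a
            // request for 6 contiguous chunks cannot be satisfied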
            void* pvret = tstore.malloc_n(6, partition_sz);
            BOOST_TEST(pvret == 0);

            pvret = tstore.malloc_n(0, partition_sz);
            // There's no prohibition against asking for zero elements
            BOOST_TEST(pvret == 0);

            pvret = tstore.malloc_n(3, partition_sz);
            // Implicit assumption that the contiguous sequence found is the
            // first one available when traversing from the start of the
            // free list
            BOOST_TEST(pvret == pc + (3 * partition_sz));

            pvret = tstore.malloc_n(4, partition_sz);
            BOOST_TEST(pvret == pc + (7 * partition_sz));

            // There should still be one run of two contiguous chunks
            // and one isolated chunk left
            std::size_t nchunks = 0;
            while(!tstore.empty())
            {
                tstore.malloc();
                ++nchunks;
            }
            BOOST_TEST(nchunks == 3);
        }

        {
            char* const pc = track_allocator::malloc(12 * partition_sz);
            test_simp_seg_store tstore;
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (3 * partition_sz),
                3 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (7 * partition_sz),
                5 * partition_sz, partition_sz);

            tstore.malloc_n(3, partition_sz);
            // "Order-preserving"
            test_is_order(tstore);
        }
    }

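    // Release every block handed out by track_allocator during the test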
    for(std::set<char*>::iterator itr
            = track_allocator::allocated_blocks.begin();
        itr != track_allocator::allocated_blocks.end();
        ++itr)
    {
        delete [] *itr;
    }
    track_allocator::allocated_blocks.clear();
}