/* Copyright (C) 2011 Kwan Ting Chan
 *
 * Use, modification and distribution is subject to the
 * Boost Software License, Version 1.0. (See accompanying
 * file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
 */

#include "test_simple_seg_storage.hpp"
#include "track_allocator.hpp"
#include "random_shuffle.hpp"

#include <boost/pool/simple_segregated_storage.hpp>
#include <boost/assert.hpp>
#include <boost/integer/common_factor_ct.hpp>
#if defined(BOOST_MSVC) && (BOOST_MSVC <= 1600)
#pragma warning(push)
#pragma warning(disable: 4244)
// ..\..\boost/random/uniform_int_distribution.hpp(171) :
// warning C4127: conditional expression is constant
#pragma warning(disable: 4127)
#endif
#include <boost/random/mersenne_twister.hpp>
#include <boost/random/uniform_int.hpp>
#include <boost/random/variate_generator.hpp>
#if defined(BOOST_MSVC) && (BOOST_MSVC <= 1600)
#pragma warning(pop)
#endif

#include <boost/core/lightweight_test.hpp>

#include <algorithm>
#include <functional>
#include <set>
#include <vector>

#include <cstddef>
#include <cstdlib>
#include <ctime>

#ifdef BOOST_MSVC
#pragma warning(disable:4267)
#endif

// "A free list is ordered if repeated calls to malloc() will result in a
// constantly-increasing sequence of values, as determined by std::less<void*>"
// Return: true if in constantly-increasing order, false otherwise
bool check_is_order(const std::vector<void*>& vs)
{
    if(vs.size() < 2) { return true; }

    void *lower, *higher;
    std::vector<void*>::const_iterator ci = vs.begin();
    lower = *(ci++);
    while(ci != vs.end())
    {
        higher = *(ci++);
        if(!std::less<void*>()(lower, higher)) { return false; }
        // Advance so each element is compared with its predecessor,
        // not just with the first element
        lower = higher;
    }

    return true;
}

// Return: number of chunks malloc'd from store
std::size_t test_is_order(test_simp_seg_store& store)
{
    std::vector<void*> vpv;
    std::size_t nchunk = 0;
    // Pre: !empty()
    while(!store.empty())
    {
        void* const first = store.get_first();
        void* const pv = store.malloc();
        // "Takes the first available chunk from the free list
        // and returns it"
        BOOST_TEST(first == pv);

        vpv.push_back(pv);
        ++nchunk;
    }
    BOOST_TEST(check_is_order(vpv));

    return nchunk;
}

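// Shared random engine; seeded in main() and used to draw a random block size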
boost::mt19937 gen;

int main()
{
    std::srand(static_cast<unsigned>(std::time(0)));
    gen.seed(static_cast<boost::uint32_t>(std::time(0)));

    /* Store::segregate(block, sz, partition_sz, end) */
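    // partition_sz is the least common multiple of sizeof(void*) and
    // sizeof(int), so every chunk is a whole number of void*'s and int's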
    std::size_t partition_sz
        = boost::integer::static_lcm<sizeof(void*), sizeof(int)>::value;
    boost::uniform_int<> dist(partition_sz, 10000);
    boost::variate_generator<boost::mt19937&,
        boost::uniform_int<> > die(gen, dist);
    std::size_t block_size = die();
    // Pre: npartition_sz >= sizeof(void*)
    //      npartition_sz = sizeof(void*) * i, for some integer i
    //      nsz >= npartition_sz
    //      block is properly aligned for an array of object of
    //      size npartition_sz and array of void *
    BOOST_ASSERT(partition_sz >= sizeof(void*));
    BOOST_ASSERT(partition_sz % sizeof(void*) == 0);
    BOOST_ASSERT(block_size >= partition_sz);
    {
        char* const pc = track_allocator::malloc(block_size);
        // (Test) Pre: block of memory is valid
        BOOST_ASSERT(pc);
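        // endadd is used only for its address: it is passed as the "end"
        // pointer, and segregate() should make the last chunk point at it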
        int endadd = 0;
        void* const pvret = test_simp_seg_store::segregate(pc, block_size,
            partition_sz, &endadd);

        // The first chunk "is always equal to block"
        BOOST_TEST(pvret == pc);

        void* cur = test_simp_seg_store::get_nextof(static_cast<int*>(pvret));
        void* last = pvret;
        std::size_t nchunk = 1;
        while(cur != &endadd)
        {
            ++nchunk;

            // Memory of each chunk does not overlap
            // The free list constructed is actually from the given block
            // The "interleaved free list is ordered"
            BOOST_TEST(std::less_equal<void*>()(static_cast<char*>(last)
                + partition_sz, cur));
            BOOST_TEST(std::less_equal<void*>()(static_cast<char*>(cur)
                + partition_sz, pc + block_size));

            last = cur;
            cur = test_simp_seg_store::get_nextof(static_cast<int*>(cur));
        }
        // "The last chunk is set to point to end"
        // "Partitioning into as many partition_sz-sized chunks as possible"
        BOOST_TEST(nchunk == block_size/partition_sz);
    }

    /* t.add_block(block, sz, partition_sz), t.malloc() */
    {
        // Default constructor of simple_segregated_storage does nothing
        test_simp_seg_store tstore;
        // Post: empty()
        BOOST_TEST(tstore.empty());

        char* const pc = track_allocator::malloc(block_size);
        tstore.add_block(pc, block_size, partition_sz);

        // The first chunk "is always equal to block"
        BOOST_TEST(tstore.get_first() == pc);

        // Empty before add_block() => "is ordered after"
        std::size_t nchunk = test_is_order(tstore);
        // "Partitioning into as many partition_sz-sized chunks as possible"
        BOOST_TEST(nchunk == block_size/partition_sz);

        BOOST_ASSERT(partition_sz <= 23);
        test_simp_seg_store tstore2;
        char* const pc2 = track_allocator::malloc(88);
        tstore2.add_block(pc2, 24, partition_sz);
        tstore2.add_block(pc2 + 64, 24, partition_sz);
        tstore2.add_block(pc2 + 32, 24, partition_sz);
        tstore2.add_block(track_allocator::malloc(23), 23, partition_sz);
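        // Expected chunk count: three 24-byte blocks plus one 23-byte block,
        // each split into as many partition_sz-sized chunks as it can hold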
        std::size_t nchunk_ref = (3*(24/partition_sz)) + (23/partition_sz);
        for(nchunk = 0; !tstore2.empty(); tstore2.malloc(), ++nchunk) {}
        // add_block() merges new free list to existing
        BOOST_TEST(nchunk == nchunk_ref);
    }

    /* t.free(chunk) */
    {
        test_simp_seg_store tstore;
        char* const pc = track_allocator::malloc(partition_sz);
        tstore.add_block(pc, partition_sz, partition_sz);
        void* pv = tstore.malloc();
        BOOST_TEST(tstore.empty());
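        // free() pushes the chunk back onto the free list, so the store
        // can hand it out again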
        tstore.free(pv);
    }

    /* t.add_ordered_block(block, sz, partition_sz) */
    {
        {
            char* const pc = track_allocator::malloc(6 * partition_sz);
            std::vector<void*> vpv;
            vpv.push_back(pc);
            vpv.push_back(pc + (2 * partition_sz));
            vpv.push_back(pc + (4 * partition_sz));

            do
            {
                test_simp_seg_store tstore;
                tstore.add_ordered_block(vpv[0], 2*partition_sz, partition_sz);
                tstore.add_ordered_block(vpv[1], 2*partition_sz, partition_sz);
                tstore.add_ordered_block(vpv[2], 2*partition_sz, partition_sz);
                // "Order-preserving"
                test_is_order(tstore);
            } while(std::next_permutation(vpv.begin(), vpv.end()));
        }

        {
            test_simp_seg_store tstore;
            char* const pc = track_allocator::malloc(6 * partition_sz);
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (4 * partition_sz),
                (2 * partition_sz), partition_sz);
            // "Order-preserving"
            test_is_order(tstore);
        }

        {
            test_simp_seg_store tstore;
            char* const pc = track_allocator::malloc(6 * partition_sz);
            tstore.add_ordered_block(pc + (4 * partition_sz),
                (2 * partition_sz), partition_sz);
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            // "Order-preserving"
            test_is_order(tstore);
        }
    }

    /* t.ordered_free(chunk) */
    {
        char* const pc = track_allocator::malloc(6 * partition_sz);

        test_simp_seg_store tstore;
        tstore.add_block(pc, 6 * partition_sz, partition_sz);

        std::vector<void*> vpv;
        for(std::size_t i=0; i < 6; ++i) { vpv.push_back(tstore.malloc()); }
        BOOST_ASSERT(tstore.empty());
        pool_test_random_shuffle(vpv.begin(), vpv.end());

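        // Chunks are returned in random order; ordered_free() must reinsert
        // each one at the right position to keep the free list sorted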
        for(std::size_t i=0; i < 6; ++i)
        {
            tstore.ordered_free(vpv[i]);
        }
        // "Order-preserving"
        test_is_order(tstore);
    }

    /* t.malloc_n(n, partition_sz) */
    {
        {
            char* const pc = track_allocator::malloc(12 * partition_sz);
            test_simp_seg_store tstore;
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (3 * partition_sz),
                3 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (7 * partition_sz),
                5 * partition_sz, partition_sz);

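            // The free runs are 2, 3 and 5 chunks long, so there are no
            // 6 contiguous chunks and malloc_n() must fail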
            void* pvret = tstore.malloc_n(6, partition_sz);
            BOOST_TEST(pvret == 0);

            pvret = tstore.malloc_n(0, partition_sz);
            // There's no prohibition against asking for zero elements
            BOOST_TEST(pvret == 0);

            pvret = tstore.malloc_n(3, partition_sz);
            // Implicit assumption that contiguous sequence found is the first
            // available while traversing from the start of the free list
            BOOST_TEST(pvret == pc + (3 * partition_sz));

            pvret = tstore.malloc_n(4, partition_sz);
            BOOST_TEST(pvret == pc + (7 * partition_sz));

            // There should still be two contiguous
            // and one non-contiguous chunk left
            std::size_t nchunks = 0;
            while(!tstore.empty())
            {
                tstore.malloc();
                ++nchunks;
            }
            BOOST_TEST(nchunks == 3);
        }

        {
            char* const pc = track_allocator::malloc(12 * partition_sz);
            test_simp_seg_store tstore;
            tstore.add_ordered_block(pc, 2 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (3 * partition_sz),
                3 * partition_sz, partition_sz);
            tstore.add_ordered_block(pc + (7 * partition_sz),
                5 * partition_sz, partition_sz);

            tstore.malloc_n(3, partition_sz);
            // "Order-preserving"
            test_is_order(tstore);
        }
    }

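    // Release every block obtained through track_allocator during the tests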
    for(std::set<char*>::iterator itr
            = track_allocator::allocated_blocks.begin();
        itr != track_allocator::allocated_blocks.end();
        ++itr)
    {
        delete [] *itr;
    }
    track_allocator::allocated_blocks.clear();
    return boost::report_errors();
}