]>
Commit | Line | Data |
---|---|---|
4f3755d1 MW |
1 | /* |
2 | * multiorder.c: Multi-order radix tree entry testing | |
3 | * Copyright (c) 2016 Intel Corporation | |
4 | * Author: Ross Zwisler <ross.zwisler@linux.intel.com> | |
5 | * Author: Matthew Wilcox <matthew.r.wilcox@intel.com> | |
6 | * | |
7 | * This program is free software; you can redistribute it and/or modify it | |
8 | * under the terms and conditions of the GNU General Public License, | |
9 | * version 2, as published by the Free Software Foundation. | |
10 | * | |
11 | * This program is distributed in the hope it will be useful, but WITHOUT | |
12 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for | |
14 | * more details. | |
15 | */ | |
16 | #include <linux/radix-tree.h> | |
17 | #include <linux/slab.h> | |
18 | #include <linux/errno.h> | |
19 | ||
20 | #include "test.h" | |
21 | ||
0fc9b8ca RZ |
/*
 * Iterate i over every index covered by a multi-order entry rooted at
 * base: the half-open range [base, base + 2^order).
 */
#define for_each_index(i, base, order) \
	for (i = base; i < base + (1 << order); i++)
24 | ||
/*
 * Verify tag behaviour on a single multi-order entry: a tag set or
 * cleared via any covered index must be visible through every covered
 * index, and must not leak into the other tag.
 */
static void __multiorder_tag_test(int index, int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int base, err, i;

	/* our canonical entry: index rounded down to the order boundary */
	base = index & ~((1 << order) - 1);

	printf("Multiorder tag test with index %d, canonical entry %d\n",
			index, base);

	err = item_insert_order(&tree, index, order);
	assert(!err);

	/*
	 * Verify we get collisions for covered indices.  We try and fail to
	 * insert an exceptional entry so we don't leak memory via
	 * item_insert_order().
	 */
	for_each_index(i, base, order) {
		err = __radix_tree_insert(&tree, i, order,
				(void *)(0xA0 | RADIX_TREE_EXCEPTIONAL_ENTRY));
		assert(err == -EEXIST);
	}

	/* freshly inserted entry carries neither tag */
	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_set(&tree, index, 0));

	/* tag 0 now visible through every covered index; tag 1 untouched */
	for_each_index(i, base, order) {
		assert(radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	/* copy tag 0 -> tag 1; exactly one entry should be retagged */
	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
	assert(radix_tree_tag_clear(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_clear(&tree, index, 1));

	/* both tags cleared -> tree reports no tagged entries at all */
	assert(!radix_tree_tagged(&tree, 0));
	assert(!radix_tree_tagged(&tree, 1));

	item_kill_tree(&tree);
}
77 | ||
3e3cdc68 MW |
/*
 * Tag a multi-order entry at index 0 plus a single-order entry at
 * (1 << order) + index2, then check tag_tagged_items() counts both.
 */
static void __multiorder_tag_test2(unsigned order, unsigned long index2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long index = (1 << order);
	index2 += index;	/* place second entry beyond the multi-order span */

	assert(item_insert_order(&tree, 0, order) == 0);
	assert(item_insert(&tree, index2) == 0);

	assert(radix_tree_tag_set(&tree, 0, 0));
	assert(radix_tree_tag_set(&tree, index2, 0));

	/* copy tag 0 -> tag 1 over the whole range; both entries counted */
	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);

	item_kill_tree(&tree);
}
94 | ||
0fc9b8ca RZ |
/*
 * Drive __multiorder_tag_test()/__multiorder_tag_test2() over entries
 * with and without sibling pointers, at several tree heights.
 */
static void multiorder_tag_tests(void)
{
	int i, j;

	/* test multi-order entry for indices 0-7 with no sibling pointers */
	__multiorder_tag_test(0, 3);
	__multiorder_tag_test(5, 3);

	/* test multi-order entry for indices 8-15 with no sibling pointers */
	__multiorder_tag_test(8, 3);
	__multiorder_tag_test(15, 3);

	/*
	 * Our order 5 entry covers indices 0-31 in a tree with height=2.
	 * This is broken up as follows:
	 * 0-7: canonical entry
	 * 8-15: sibling 1
	 * 16-23: sibling 2
	 * 24-31: sibling 3
	 */
	__multiorder_tag_test(0, 5);
	__multiorder_tag_test(29, 5);

	/* same test, but with indices 32-63 */
	__multiorder_tag_test(32, 5);
	__multiorder_tag_test(44, 5);

	/*
	 * Our order 8 entry covers indices 0-255 in a tree with height=3.
	 * This is broken up as follows:
	 * 0-63: canonical entry
	 * 64-127: sibling 1
	 * 128-191: sibling 2
	 * 192-255: sibling 3
	 */
	__multiorder_tag_test(0, 8);
	__multiorder_tag_test(190, 8);

	/* same test, but with indices 256-511 */
	__multiorder_tag_test(256, 8);
	__multiorder_tag_test(300, 8);

	/* a deep index to exercise taller trees */
	__multiorder_tag_test(0x12345678UL, 8);

	for (i = 1; i < 10; i++)
		for (j = 0; j < (10 << i); j++)
			__multiorder_tag_test2(i, j);
}
143 | ||
4f3755d1 MW |
/*
 * Insert one order-sized entry and verify lookup, collision, slot
 * replacement and deletion behave consistently across every index the
 * entry covers (and that neighbouring indices stay absent).
 */
static void multiorder_check(unsigned long index, int order)
{
	unsigned long i;
	unsigned long min = index & ~((1UL << order) - 1);	/* canonical index */
	unsigned long max = min + (1UL << order);		/* one past the range */
	void **slot;
	struct item *item2 = item_create(min, order);
	RADIX_TREE(tree, GFP_KERNEL);

	printf("Multiorder index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, index, order) == 0);

	/* every covered index resolves to the one entry we inserted */
	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == index);
	}
	/* indices outside [min, max) must be absent */
	for (i = 0; i < min; i++)
		item_check_absent(&tree, i);
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);
	/* plain inserts collide on every covered index */
	for (i = min; i < max; i++)
		assert(radix_tree_insert(&tree, i, item2) == -EEXIST);

	/* swap the entry in place: free the old item, install item2 */
	slot = radix_tree_lookup_slot(&tree, index);
	free(*slot);
	radix_tree_replace_slot(&tree, slot, item2);
	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		/* item2 was created at min, so lookups now report min */
		assert(item->index == min);
	}

	/* deleting via the canonical index removes the whole entry */
	assert(item_delete(&tree, min) != 0);

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}
183 | ||
afe0e395 MW |
/*
 * Check that growing the tree past a multi-order entry and then deleting
 * the extra index shrinks the tree back to the original root node,
 * leaving the multi-order entry intact.
 */
static void multiorder_shrink(unsigned long index, int order)
{
	unsigned long i;
	unsigned long max = 1 << order;
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;

	printf("Multiorder shrink index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, 0, order) == 0);

	/* remember the root before the tree has to grow */
	node = tree.rnode;

	assert(item_insert(&tree, index) == 0);
	assert(node != tree.rnode);	/* insert at 'index' forced a taller tree */

	assert(item_delete(&tree, index) != 0);
	assert(node == tree.rnode);	/* delete shrank back to the old root */

	/* the original order-sized entry must have survived the shrink */
	for (i = 0; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == 0);
	}
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);

	if (!item_delete(&tree, 0)) {
		printf("failed to delete index %ld (order %d)\n", index, order); abort();
	}

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}
218 | ||
7b60e9ad MW |
/*
 * Regression test: tag an existing entry at index 0, then insert a
 * multi-order entry elsewhere.  The exact sequence reproduces a
 * historical insertion bug; do not reorder these calls.
 */
static void multiorder_insert_bug(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 0);
	radix_tree_tag_set(&tree, 0, 0);
	item_insert_order(&tree, 3 << 6, 6);

	item_kill_tree(&tree);
}
229 | ||
643b57d0 RZ |
/*
 * Iterate the tree from every possible starting index and check the
 * iterator lands on the expected mixed-order entries, reporting a
 * consistent index, shift and order for each.
 */
void multiorder_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j, err;

	printf("Multiorder iteration test\n");

#define NUM_ENTRIES 11
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(&tree, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		/* find the first entry whose covered range reaches index j */
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		radix_tree_for_each_slot(slot, &tree, &iter, j) {
			int height = order[i] / RADIX_TREE_MAP_SHIFT;
			int shift = height * RADIX_TREE_MAP_SHIFT;
			unsigned long mask = (1UL << order[i]) - 1;
			struct item *item = *slot;

			/* iterator index may differ from canonical index only
			 * within the entry's covered range */
			assert((iter.index | mask) == (index[i] | mask));
			assert(iter.shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(&tree);
}
270 | ||
/*
 * Same idea as multiorder_iteration(), but only a subset of the entries
 * is tagged; verify tagged iteration visits exactly the tagged entries,
 * and that tag_tagged_items() copies tags between tag slots correctly.
 */
void multiorder_tagged_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j;

	printf("Multiorder tagged iteration test\n");

#define MT_NUM_ENTRIES 9
	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
	int order[MT_NUM_ENTRIES] = {1, 0, 2, 4, 3, 1, 3, 0, 7};

#define TAG_ENTRIES 7
	/* subset of index[] that gets tag 1 set */
	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};

	for (i = 0; i < MT_NUM_ENTRIES; i++)
		assert(!item_insert_order(&tree, index[i], order[i]));

	assert(!radix_tree_tagged(&tree, 1));

	for (i = 0; i < TAG_ENTRIES; i++)
		assert(radix_tree_tag_set(&tree, tag_index[i], 1));

	/* iterate tag 1 starting from every index up to 256 */
	for (j = 0; j < 256; j++) {
		int k;

		/* find the first tagged entry whose covered range reaches j */
		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
			unsigned long mask;
			struct item *item = *slot;
			/* map tag_index[i] back to its index[]/order[] slot */
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/* copy tag 1 -> tag 2 for all tagged entries */
	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
				TAG_ENTRIES);

	/* repeat the iteration check against tag 2 */
	for (j = 0; j < 256; j++) {
		int mask, k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1 << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/* copy tag 1 -> tag 0 starting at index 1; index 0 is still found
	 * because its multi-order entry covers index 1 */
	assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
			== TAG_ENTRIES);
	i = 0;
	radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
		assert(iter.index == tag_index[i]);
		i++;
	}

	item_kill_tree(&tree);
}
357 | ||
/*
 * Join an order1 entry over an existing order2 entry (order1 > order2)
 * and verify the new entry replaces the old one.  The old item is freed
 * here only if the join actually removed it from the tree.
 */
static void multiorder_join1(unsigned long index,
				unsigned order1, unsigned order2)
{
	unsigned long loc;
	void *item, *item2 = item_create(index + 1, order1);
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert_order(&tree, index, order2);
	item = radix_tree_lookup(&tree, index);
	radix_tree_join(&tree, index + 1, order1, item2);
	/* the join should have displaced the original item */
	loc = find_item(&tree, item);
	if (loc == -1)
		free(item);	/* no longer in the tree: we own it now */
	item = radix_tree_lookup(&tree, index + 1);
	assert(item == item2);
	item_kill_tree(&tree);
}
375 | ||
/*
 * Verify radix_tree_join() fixes up the per-node exceptional-entry
 * count when it absorbs an exceptional entry into a larger normal one.
 */
static void multiorder_join2(unsigned order1, unsigned order2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void *item1 = item_create(0, order1);
	void *item2;

	item_insert_order(&tree, 0, order2);
	/* 0x12 has the exceptional bit set, so it counts as exceptional */
	radix_tree_insert(&tree, 1 << order2, (void *)0x12UL);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == (void *)0x12UL);
	assert(node->exceptional == 1);

	/* joining a normal entry over it must drop the count back to 0 */
	radix_tree_join(&tree, 0, order1, item1);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == item1);
	assert(node->exceptional == 0);
	item_kill_tree(&tree);
}
395 | ||
e8de4340 MW |
396 | /* |
397 | * This test revealed an accounting bug for exceptional entries at one point. | |
398 | * Nodes were being freed back into the pool with an elevated exception count | |
399 | * by radix_tree_join() and then radix_tree_split() was failing to zero the | |
400 | * count of exceptional entries. | |
401 | */ | |
402 | static void multiorder_join3(unsigned int order) | |
403 | { | |
404 | RADIX_TREE(tree, GFP_KERNEL); | |
405 | struct radix_tree_node *node; | |
406 | void **slot; | |
407 | struct radix_tree_iter iter; | |
408 | unsigned long i; | |
409 | ||
410 | for (i = 0; i < (1 << order); i++) { | |
411 | radix_tree_insert(&tree, i, (void *)0x12UL); | |
412 | } | |
413 | ||
414 | radix_tree_join(&tree, 0, order, (void *)0x16UL); | |
415 | rcu_barrier(); | |
416 | ||
417 | radix_tree_split(&tree, 0, 0); | |
418 | ||
419 | radix_tree_for_each_slot(slot, &tree, &iter, 0) { | |
420 | radix_tree_iter_replace(&tree, &iter, slot, (void *)0x12UL); | |
421 | } | |
422 | ||
423 | __radix_tree_lookup(&tree, 0, &node, NULL); | |
424 | assert(node->exceptional == node->count); | |
425 | ||
426 | item_kill_tree(&tree); | |
427 | } | |
428 | ||
175542f5 MW |
/*
 * Driver for the join tests: sweep multiorder_join1() over a spread of
 * indices and every (order1 > order2) pair, then multiorder_join2()
 * over the same order pairs, then multiorder_join3() over orders 3-9.
 */
static void multiorder_join(void)
{
	int idx, big, small;

	for (idx = 0; idx < 1024; idx = idx * 2 + 3)
		for (big = 1; big < 15; big++)
			for (small = 0; small < big; small++)
				multiorder_join1(idx, big, small);

	for (big = 1; big < 15; big++)
		for (small = 0; small < big; small++)
			multiorder_join2(big, small);

	for (big = 3; big < 10; big++)
		multiorder_join3(big);
}
451 | ||
2791653a MW |
/*
 * After a split, report (not assert) if the preload pool still holds
 * nodes or if more nodes are allocated than were preloaded.
 */
static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
{
	struct radix_tree_preload *rtp = &radix_tree_preloads;
	if (rtp->nr != 0)
		printf("split(%u %u) remaining %u\n", old_order, new_order,
				rtp->nr);
	/*
	 * Can't check for equality here as some nodes may have been
	 * RCU-freed while we ran.  But we should never finish with more
	 * nodes allocated since they should have all been preloaded.
	 */
	if (nr_allocated > alloc)
		printf("split(%u %u) allocated %u %u\n", old_order, new_order,
				alloc, nr_allocated);
}
467 | ||
e157b555 MW |
/*
 * Split one old_order entry into new_order entries under GFP_ATOMIC,
 * checking via check_mem() that the split consumed only preloaded nodes.
 */
static void __multiorder_split(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_ATOMIC);	/* atomic: split must not allocate */
	void **slot;
	struct radix_tree_iter iter;
	unsigned alloc;

	radix_tree_preload(GFP_KERNEL);
	assert(item_insert_order(&tree, 0, old_order) == 0);
	radix_tree_preload_end();

	/* Wipe out the preloaded cache or it'll confuse check_mem() */
	radix_tree_cpu_dead(0);

	radix_tree_tag_set(&tree, 0, 2);

	radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
	alloc = nr_allocated;	/* baseline for check_mem() */
	radix_tree_split(&tree, 0, new_order);
	check_mem(old_order, new_order, alloc);
	/* fill each split-off slot with a fresh item */
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}
	radix_tree_preload_end();

	item_kill_tree(&tree);
}
496 | ||
/*
 * Split an exceptional multi-order entry and replace the pieces with
 * normal items: the node's exceptional count must drop to zero.
 */
static void __multiorder_split2(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	/* 0x12 is an exceptional-entry value */
	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}

	/* all slots now hold normal items; exceptional count must be 0 */
	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item != (void *)0x12);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}
523 | ||
/*
 * Split an exceptional entry and replace the pieces with exceptional
 * entries (and, second pass, with mostly NULLs): exceptional counts
 * must stay consistent with node->count up the ancestor chain.
 */
static void __multiorder_split3(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	/* replace every split-off slot with another exceptional entry */
	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->exceptional > 0);

	item_kill_tree(&tree);

	/* second pass: keep only the slot at 1 << new_order, NULL the rest */
	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		if (iter.index == (1 << new_order))
			radix_tree_iter_replace(&tree, &iter, slot,
						(void *)0x16);
		else
			radix_tree_iter_replace(&tree, &iter, slot, NULL);
	}

	item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
	assert(item == (void *)0x16);
	/* the leaf node holds only exceptional entries ... */
	assert(node->count == node->exceptional);
	/* ... and each ancestor holds exactly one (non-exceptional) child */
	do {
		node = node->parent;
		if (!node)
			break;
		assert(node->count == 1);
		assert(node->exceptional == 0);
	} while (1);

	item_kill_tree(&tree);
}
577 | ||
/*
 * Driver for the split tests: run each split variant for every
 * old_order in [3, 11) against every smaller new_order.
 */
static void multiorder_split(void)
{
	int old_order = 3;

	while (old_order < 11) {
		int new_order;

		for (new_order = 0; new_order < old_order; new_order++) {
			__multiorder_split(old_order, new_order);
			__multiorder_split2(old_order, new_order);
			__multiorder_split3(old_order, new_order);
		}
		old_order++;
	}
}
589 | ||
/*
 * Check exceptional-entry accounting when an exceptional entry shares a
 * node with a multi-order item, for both deletion and slot replacement.
 */
static void multiorder_account(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;

	item_insert_order(&tree, 0, 5);

	/* exceptional entry next to the order-5 item */
	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 0, &node, NULL);
	/* two slots used, one of which is exceptional */
	assert(node->count == node->exceptional * 2);
	radix_tree_delete(&tree, 1 << 5);
	assert(node->exceptional == 0);

	/* same again, but clear the slot via __radix_tree_replace() */
	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 1 << 5, &node, &slot);
	assert(node->count == node->exceptional * 2);
	__radix_tree_replace(&tree, node, slot, NULL, NULL, NULL);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}
612 | ||
4f3755d1 MW |
/*
 * Entry point for the multi-order test suite: runs every test above.
 */
void multiorder_checks(void)
{
	int i;

	for (i = 0; i < 20; i++) {
		multiorder_check(200, i);
		multiorder_check(0, i);
		/* an index just past an order-i boundary */
		multiorder_check((1UL << i) + 1, i);
	}

	for (i = 0; i < 15; i++)
		multiorder_shrink((1UL << (i + RADIX_TREE_MAP_SHIFT)), i);

	multiorder_insert_bug();
	multiorder_tag_tests();
	multiorder_iteration();
	multiorder_tagged_iteration();
	multiorder_join();
	multiorder_split();
	multiorder_account();

	/* NOTE(review): presumably drains the per-cpu preload pool, as in
	 * __multiorder_split() — confirm against radix_tree_cpu_dead() */
	radix_tree_cpu_dead(0);
}