/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>		/* in_interrupt() */
/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;
/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
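
/*
 * Illustrative arithmetic (an editorial sketch, not original text): assuming
 * the default RADIX_TREE_MAP_SHIFT of 6 on a 64-bit machine,
 * RADIX_TREE_MAX_PATH = DIV_ROUND_UP(64, 6) = 11, so the preload pool holds
 * 11 * 2 - 1 = 21 nodes: two full root-to-leaf paths sharing only the root.
 */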
/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}
static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}
static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}
/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;

	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}
/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}
static void dump_node(void *slot, int height, int offset)
{
	struct radix_tree_node *node;
	int i;

	if (!slot)
		return;

	if (height == 0) {
		pr_debug("radix entry %p offset %d\n", slot, offset);
		return;
	}

	node = indirect_to_ptr(slot);
	pr_debug("radix node: %p offset %d tags %lx %lx %lx path %x count %d parent %p\n",
		slot, offset, node->tags[0][0], node->tags[1][0],
		node->tags[2][0], node->path, node->count, node->parent);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++)
		dump_node(node->slots[i], height - 1, i);
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p height %d rnode %p tags %x\n",
			root, root->height, root->rnode,
			root->gfp_mask >> __GFP_BITS_SHIFT);
	if (!radix_tree_is_indirect_ptr(root->rnode))
		return;
	dump_node(root->rnode, root->height, 0);
}
/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading in the interrupt anyway as all the allocations have to
	 * be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_ACCOUNT | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep,
			       gfp_mask | __GFP_ACCOUNT);
out:
	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}
static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask);
}
EXPORT_SYMBOL(radix_tree_preload);
/*
 * The same as radix_tree_preload(), except we don't guarantee preloading
 * happens.  We do it if we decide it helps.  On success, return zero with
 * preemption disabled.  On error, return -ENOMEM with preemption not disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);
/*
 *	Return the maximum key which can be stored in a
 *	radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}
/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root,
				unsigned long index, unsigned order)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if ((root->rnode == NULL) && (order == 0)) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;

		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height.  */
		newheight = root->height+1;
		BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK);
		node->path = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (radix_tree_is_indirect_ptr(slot) && newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
			slot = ptr_to_indirect(slot);
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}
/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		index occupies 2^order aligned slots
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void ***slotp)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift, offset;
	int error;

	BUG_ON((0 < order) && (order < RADIX_TREE_MAP_SHIFT));

	/* Make sure the tree is high enough.  */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index, order);
		if (error)
			return error;
	}

	slot = root->rnode;

	height = root->height;
	shift = height * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (shift > order) {
		if (slot == NULL) {
			/* Have to add a child node.  */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->path = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset],
							ptr_to_indirect(slot));
				node->count++;
				slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT;
			} else
				rcu_assign_pointer(root->rnode,
							ptr_to_indirect(slot));
		} else if (!radix_tree_is_indirect_ptr(slot))
			break;

		/* Go a level down */
		height--;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = indirect_to_ptr(slot);
		slot = node->slots[offset];
	}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	/* Insert pointers to the canonical entry */
	if ((shift - order) > 0) {
		int i, n = 1 << (shift - order);
		offset = offset & ~(n - 1);
		slot = ptr_to_indirect(&node->slots[offset]);
		for (i = 0; i < n; i++) {
			if (node->slots[offset + i])
				return -EEXIST;
		}

		for (i = 1; i < n; i++) {
			rcu_assign_pointer(node->slots[offset + i], slot);
			node->count++;
		}
	}
#endif

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = node ? node->slots + offset : (void **)&root->rnode;
	return 0;
}
/**
 *	__radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		key covers the 2^order indices around index
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		node->count++;
		BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK));
		BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK));
	} else {
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);
/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned int height, shift;
	void **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;

		if (nodep)
			*nodep = NULL;
		if (slotp)
			*slotp = (void **)&root->rnode;
		return node;
	}
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		parent = node;
		slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK);
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;
		if (!radix_tree_is_indirect_ptr(node))
			break;
		node = indirect_to_ptr(node);

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}
/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  From
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		slot = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);
/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = root->rnode;

	while (shift) {
		if (slot == NULL)
			goto out;
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		slot = indirect_to_ptr(slot);

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;
		node = indirect_to_ptr(node);

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		if (!radix_tree_is_indirect_ptr(node))
			return 1;
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset, height;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	height = rnode->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		struct radix_tree_node *slot;
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset	< RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is leaf-node */
		if (!shift)
			break;

		slot = rcu_dereference_raw(node->slots[offset]);
		if (slot == NULL)
			goto restart;
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		node = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				    tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range, if iftag is set, the
 * function also sets settag.  The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			node = slot;
			slot = slot->slots[offset];
			if (radix_tree_is_indirect_ptr(slot)) {
				slot = indirect_to_ptr(slot);
				shift -= RADIX_TREE_MAP_SHIFT;
				continue;
			} else {
				slot = node;
				node = node->parent;
			}
		}

		/* tag the leaf */
		tagged += 1 << shift;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * There is no need to set the root tag if nothing was tagged with
	 * settag within the range from *first_indexp to last_index.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the semantics
 *	of an RCU protected gang lookup are as though multiple radix_tree_lookups
 *	have been issued in individual locks, and results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
		if (!radix_tree_is_indirect_ptr(slot)) {
			if (slot == item) {
				*found_index = index + i;
				index = 0;
			} else {
				index += shift;
			}
			goto out;
		}
		slot = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->path &
						RADIX_TREE_HEIGHT_MASK);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */
/**
 *	radix_tree_shrink    -    shrink height of a radix tree to minimal
 *	@root:		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * The candidate node has more than one child, or its child
		 * is not at the leftmost slot, or it is a multiorder entry,
		 * we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		slot = to_free->slots[0];
		if (!slot)
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		if (root->height > 1) {
			if (!radix_tree_is_indirect_ptr(slot))
				break;

			slot = indirect_to_ptr(slot);
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page is 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}
/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 *
 *	Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode)) {
				radix_tree_shrink(root);
				if (root->height == 0)
					deleted = true;
			}
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			unsigned int offset;

			offset = node->path >> RADIX_TREE_HEIGHT_SHIFT;
			parent->slots[offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->height = 0;
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}
static inline void delete_sibling_entries(struct radix_tree_node *node,
					void *ptr, unsigned offset)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	int i;

	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		node->slots[offset + i] = NULL;
		node->count--;
	}
#endif
}
/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = index & RADIX_TREE_MAP_MASK;

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This way of doing it would be inefficient, but seldom is any set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	delete_sibling_entries(node, ptr_to_indirect(slot), offset);
	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);
/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);
static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}
static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}

static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}
static int radix_tree_callback(struct notifier_block *nfb,
			       unsigned long action, void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nodes) {
			node = rtp->nodes;
			rtp->nodes = node->private_data;
			kmem_cache_free(radix_tree_node_cachep, node);
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}
void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}