/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>		/* in_interrupt() */
/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;
/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;
/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
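/*
 * For illustration only (nothing below uses this): with the usual
 * RADIX_TREE_MAP_SHIFT of 6 on a 64-bit machine, RADIX_TREE_MAX_PATH is
 * DIV_ROUND_UP(64, 6) = 11, so one preload batch is 2 * 11 - 1 = 21 nodes.
 */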
/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}
#ifdef CONFIG_RADIX_TREE_MULTIORDER
/* Sibling slots point directly to another slot in the same node */
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	void **ptr = node;
	return (parent->slots <= ptr) &&
			(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
}
#else
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	return false;
}
#endif
static inline unsigned long get_slot_offset(struct radix_tree_node *parent,
						 void **slot)
{
	return slot - parent->slots;
}
static unsigned radix_tree_descend(struct radix_tree_node *parent,
			struct radix_tree_node **nodep, unsigned offset)
{
	void **entry = rcu_dereference_raw(parent->slots[offset]);

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	if (radix_tree_is_indirect_ptr(entry)) {
		unsigned long siboff = get_slot_offset(parent, entry);
		if (siboff < RADIX_TREE_MAP_SIZE) {
			offset = siboff;
			entry = rcu_dereference_raw(parent->slots[offset]);
		}
	}
#endif

	*nodep = (void *)entry;
	return offset;
}
static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}
static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}
static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}
/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	unsigned idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}
/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}
static void dump_node(void *slot, int height, int offset)
{
	struct radix_tree_node *node;
	int i;

	if (!slot)
		return;

	if (height == 0) {
		pr_debug("radix entry %p offset %d\n", slot, offset);
		return;
	}

	node = indirect_to_ptr(slot);
	pr_debug("radix node: %p offset %d tags %lx %lx %lx path %x count %d parent %p\n",
		slot, offset, node->tags[0][0], node->tags[1][0],
		node->tags[2][0], node->path, node->count, node->parent);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++)
		dump_node(node->slots[i], height - 1, i);
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p height %d rnode %p tags %x\n",
			root, root->height, root->rnode,
			root->gfp_mask >> __GFP_BITS_SHIFT);
	if (!radix_tree_is_indirect_ptr(root->rnode))
		return;
	dump_node(root->rnode, root->height, 0);
}
/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading in the interrupt anyway as all the allocations have to
	 * be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_ACCOUNT | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep,
			       gfp_mask | __GFP_ACCOUNT);
out:
	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}
static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}
static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask);
}
EXPORT_SYMBOL(radix_tree_preload);
/*
 * The same as above function, except we don't guarantee preloading happens.
 * We do it, if we decide it helps. On success, return zero with preemption
 * disabled. On error, return -ENOMEM with preemption not disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);
/*
 *	Return the maximum key which can be stored into a
 *	radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}
/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root,
				unsigned long index, unsigned order)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if ((root->rnode == NULL) && (order == 0)) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height.  */
		newheight = root->height + 1;
		BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK);
		node->path = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (radix_tree_is_indirect_ptr(slot) && newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
			slot = ptr_to_indirect(slot);
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}
/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		index occupies 2^order aligned slots
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void ***slotp)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift, offset;
	int error;

	/* Make sure the tree is high enough. */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index, order);
		if (error)
			return error;
	}

	slot = root->rnode;

	height = root->height;
	shift = height * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (shift > order) {
		if (slot == NULL) {
			/* Have to add a child node. */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->path = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset],
						   ptr_to_indirect(slot));
				node->count++;
				slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT;
			} else
				rcu_assign_pointer(root->rnode,
						   ptr_to_indirect(slot));
		} else if (!radix_tree_is_indirect_ptr(slot))
			return -EEXIST;

		/* Go a level down */
		height--;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = indirect_to_ptr(slot);
		slot = node->slots[offset];
	}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	/* Insert pointers to the canonical entry */
	if (order > shift) {
		int i, n = 1 << (order - shift);
		offset = offset & ~(n - 1);
		slot = ptr_to_indirect(&node->slots[offset]);
		for (i = 0; i < n; i++) {
			if (node->slots[offset + i])
				return -EEXIST;
		}

		for (i = 1; i < n; i++) {
			rcu_assign_pointer(node->slots[offset + i], slot);
			node->count++;
		}
	}
#endif

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = node ? node->slots + offset : (void **)&root->rnode;
	return 0;
}
/**
 *	__radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		key covers the 2^order indices around index
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		node->count++;
		BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK));
		BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK));
	} else {
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);
/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned int height, shift;
	void **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;

		if (nodep)
			*nodep = NULL;
		if (slotp)
			*slotp = (void **)&root->rnode;
		return node;
	}
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	do {
		parent = node;
		slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK);
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;
		if (!radix_tree_is_indirect_ptr(node))
			break;
		node = indirect_to_ptr(node);

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}
/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree, from
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		slot = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);
/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = root->rnode;

	while (shift) {
		if (slot == NULL)
			goto out;
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		slot = indirect_to_ptr(slot);

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;
		node = indirect_to_ptr(node);

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		if (!radix_tree_is_indirect_ptr(node))
			return 1;
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset, height;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	height = rnode->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		struct radix_tree_node *slot;
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is leaf-node */
		if (!shift)
			break;

		slot = rcu_dereference_raw(node->slots[offset]);
		if (slot == NULL)
			goto restart;
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		node = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				    tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range that has iftag set,
 * the function also sets settag. The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			node = slot;
			slot = slot->slots[offset];
			if (radix_tree_is_indirect_ptr(slot)) {
				slot = indirect_to_ptr(slot);
				shift -= RADIX_TREE_MAP_SHIFT;
				continue;
			} else {
				slot = node;
				node = node->parent;
			}
		}

		/* tag the leaf */
		tagged += 1 << shift;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * We need not set the root tag if no tag was set with settag within
	 * the range from *first_indexp to last_index.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the semantics
 *	of an RCU protected gang lookup are as though multiple radix_tree_lookups
 *	have been issued in individual locks, and results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
		if (!radix_tree_is_indirect_ptr(slot)) {
			if (slot == item) {
				*found_index = index + i;
				index = 0;
			} else {
				index += shift;
			}
			goto out;
		}
		slot = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}
/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns the index where the item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->path &
						RADIX_TREE_HEIGHT_MASK);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */
/**
 *	radix_tree_shrink    -    shrink height of a radix tree to minimal
 *	@root		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * If the candidate node has more than one child, or its child
		 * is not at the leftmost slot, or it is a multiorder entry,
		 * we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		slot = to_free->slots[0];
		if (!slot)
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		if (root->height > 1) {
			if (!radix_tree_is_indirect_ptr(slot))
				break;

			slot = indirect_to_ptr(slot);
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If the item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page is 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}
/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 *
 *	Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode)) {
				radix_tree_shrink(root);
				if (root->height == 0)
					deleted = true;
			}
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			unsigned int offset;

			offset = node->path >> RADIX_TREE_HEIGHT_SHIFT;
			parent->slots[offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->height = 0;
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}
static inline void delete_sibling_entries(struct radix_tree_node *node,
					void *ptr, unsigned offset)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	int i;
	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		node->slots[offset + i] = NULL;
		node->count--;
	}
#endif
}
/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = get_slot_offset(node, slot);

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This way of doing it would be inefficient, but seldom is any set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	delete_sibling_entries(node, ptr_to_indirect(slot), offset);
	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);
/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);
static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}
static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}
static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}
static int radix_tree_callback(struct notifier_block *nfb,
			       unsigned long action,
			       void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			node = rtp->nodes;
			rtp->nodes = node->private_data;
			kmem_cache_free(radix_tree_node_cachep, node);
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}
void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}