/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/hardirq.h>		/* in_interrupt() */
/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;
/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;
/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
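
/*
 * Editor's note (illustrative arithmetic, not part of the original file):
 * assuming the common RADIX_TREE_MAP_SHIFT of 6 and 64-bit longs,
 * RADIX_TREE_MAX_PATH is DIV_ROUND_UP(64, 6) == 11, so
 * RADIX_TREE_PRELOAD_SIZE == 11 * 2 - 1 == 21: one full-depth branch for the
 * new index, one for the existing items, minus the shared root node.
 */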
/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE];
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}
static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}
static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}
static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}
/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;

	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}
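
/*
 * Editor's sketch (not part of the original file): any_tag_set() is what
 * allows tag clearing to stop propagating towards the root as soon as a
 * sibling slot in the same node still carries the tag; conceptually:
 *
 *	tag_clear(node, tag, offset);
 *	if (any_tag_set(node, tag))
 *		return;		// another slot in this node is still tagged
 *	// otherwise clear the corresponding bit in the parent as well
 */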
/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}
/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading in the interrupt anyway as all the allocations have to
	 * be atomic. So just do normal allocation when in interrupt.
	 */
	if (!(gfp_mask & __GFP_WAIT) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes[rtp->nr - 1];
			rtp->nodes[rtp->nr - 1] = NULL;
			rtp->nr--;
		}
	}
	if (ret == NULL)
		ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);

	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}
static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_WAIT being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = &__get_cpu_var(radix_tree_preloads);
	while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr < ARRAY_SIZE(rtp->nodes))
			rtp->nodes[rtp->nr++] = node;
		else
			kmem_cache_free(radix_tree_node_cachep, node);
	}
	ret = 0;
out:
	return ret;
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_WAIT being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!(gfp_mask & __GFP_WAIT));
	return __radix_tree_preload(gfp_mask);
}
EXPORT_SYMBOL(radix_tree_preload);
/*
 * The same as radix_tree_preload() above, except we don't guarantee that
 * preloading happens; we do it only if we decide it helps.  On success,
 * return zero with preemption disabled.  On error, return -ENOMEM with
 * preemption not disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfp_mask & __GFP_WAIT)
		return __radix_tree_preload(gfp_mask);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);
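
/*
 * Editor's sketch (not part of the original file): the usual calling pattern
 * is to preload outside the caller's spinlock and insert with the lock held;
 * my_lock, my_tree and my_item here are hypothetical names:
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&my_lock);
 *	error = radix_tree_insert(&my_tree, index, my_item);
 *	spin_unlock(&my_lock);
 *	radix_tree_preload_end();
 */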
/*
 * Return the maximum key which can be stored in a
 * radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}
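
/*
 * Editor's note (illustrative values, not part of the original file): with
 * RADIX_TREE_MAP_SHIFT == 6 the height_to_maxindex table reads
 * 0, 63, 4095, 262143, ... - i.e. height h covers indices up to
 * 2^(6*h) - 1, saturating at ~0UL for the deepest heights.
 */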
/*
 * Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if (root->rnode == NULL) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;

		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height.  */
		newheight = root->height + 1;
		BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK);
		node->path = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}
/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift, offset;
	int error;

	/* Make sure the tree is high enough.  */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index);
		if (error)
			return error;
	}

	slot = indirect_to_ptr(root->rnode);

	height = root->height;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (height > 0) {
		if (slot == NULL) {
			/* Have to add a child node.  */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->path = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset], slot);
				node->count++;
				slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT;
			} else
				rcu_assign_pointer(root->rnode,
							ptr_to_indirect(slot));
		}

		/* Go a level down */
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = node->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = node ? node->slots + offset : (void **)&root->rnode;

	return 0;
}
/**
 *	radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
			unsigned long index, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	error = __radix_tree_create(root, index, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		node->count++;
		BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK));
		BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK));
	} else {
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(radix_tree_insert);
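
/*
 * Editor's sketch (not part of the original file): callers racing with other
 * inserters usually treat -EEXIST separately from allocation failure, e.g.:
 *
 *	err = radix_tree_insert(&my_tree, index, item);
 *	if (err == -EEXIST)
 *		item = radix_tree_lookup(&my_tree, index);  // lost the race
 *	else if (err)
 *		goto fail;				    // -ENOMEM
 */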
/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned int height, shift;
	void **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;

		if (nodep)
			*nodep = NULL;
		if (slotp)
			*slotp = (void **)&root->rnode;
		return node;
	}
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	do {
		parent = node;
		slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK);
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}
/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
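
/*
 * Editor's sketch (not part of the original file): the update-if-exists
 * pattern mentioned above, using radix_tree_replace_slot() with the tree
 * suitably locked against other writers; my_tree, old_item and new_item are
 * hypothetical names:
 *
 *	void **slot = radix_tree_lookup_slot(&my_tree, index);
 *	if (slot && radix_tree_deref_slot(slot) == old_item)
 *		radix_tree_replace_slot(slot, new_item);
 */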
/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  From
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);
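
/*
 * Editor's sketch (not part of the original file): this is roughly how the
 * page cache marks a page dirty in its mapping, with the mapping's tree_lock
 * held:
 *
 *	radix_tree_tag_set(&mapping->page_tree, page->index,
 *			   PAGECACHE_TAG_DIRTY);
 */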
/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	while (shift) {
		if (slot == NULL)
			goto out;

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset, height;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	height = rnode->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is the leaf node */
		if (!shift)
			break;

		node = rcu_dereference_raw(node->slots[offset]);
		if (node == NULL)
			goto restart;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
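
/*
 * Editor's sketch (not part of the original file): radix_tree_next_chunk()
 * is normally reached through the iterator macros from linux/radix-tree.h,
 * e.g. walking every present slot starting at a given index:
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, start) {
 *		void *item = radix_tree_deref_slot(slot);
 *		// use item and iter.index here
 *	}
 */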
/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				    tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range, if iftag is set, the
 * function also sets settag.  The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			/* Go down one level */
			shift -= RADIX_TREE_MAP_SHIFT;
			node = slot;
			slot = slot->slots[offset];
			continue;
		}

		/* tag the leaf */
		tagged++;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * We need not tag the root if no tag was set with settag within
	 * the range from *first_indexp to last_index.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the semantics
 *	of an RCU protected gang lookup are as though multiple radix_tree_lookups
 *	have been issued in individual locks, and results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
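
/*
 * Editor's sketch (not part of the original file): a typical batched scan;
 * callers that also need the indices usually derive them from the items
 * themselves (as the page cache does with page->index) or use the _slot
 * variant below.  process() is a hypothetical helper:
 *
 *	void *batch[16];
 *	unsigned int i, nr;
 *
 *	nr = radix_tree_gang_lookup(&my_tree, batch, first, ARRAY_SIZE(batch));
 *	for (i = 0; i < nr; i++)
 *		process(batch[i]);
 */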
/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *				     based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->path &
						RADIX_TREE_HEIGHT_MASK);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */
/**
 *	radix_tree_shrink    -    shrink height of a radix tree to minimal
 *	@root		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * If the candidate node has more than one child, or its child
		 * is not at the leftmost slot, we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		if (!to_free->slots[0])
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		slot = to_free->slots[0];
		if (root->height > 1) {
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page is 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}
/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 *
 *	Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode)) {
				radix_tree_shrink(root);
				if (root->height == 0)
					deleted = true;
			}
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			unsigned int offset;

			offset = node->path >> RADIX_TREE_HEIGHT_SHIFT;
			parent->slots[offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->height = 0;
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}
/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = index & RADIX_TREE_MAP_MASK;

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This way of doing it is inefficient, but seldom is any tag set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);
/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);
static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}
static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}
static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}
static int radix_tree_callback(struct notifier_block *nfb,
				unsigned long action,
				void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			kmem_cache_free(radix_tree_node_cachep,
					rtp->nodes[rtp->nr - 1]);
			rtp->nodes[rtp->nr - 1] = NULL;
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}
void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}