2 * Copyright (C) 2001 Momchil Velikov
3 * Portions Copyright (C) 2001 Christoph Hellwig
4 * Copyright (C) 2005 SGI, Christoph Lameter
5 * Copyright (C) 2006 Nick Piggin
6 * Copyright (C) 2012 Konstantin Khlebnikov
8 * This program is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU General Public License as
10 * published by the Free Software Foundation; either version 2, or (at
11 * your option) any later version.
13 * This program is distributed in the hope that it will be useful, but
14 * WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * General Public License for more details.
18 * You should have received a copy of the GNU General Public License
19 * along with this program; if not, write to the Free Software
20 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
23 #include <linux/errno.h>
24 #include <linux/init.h>
25 #include <linux/kernel.h>
26 #include <linux/export.h>
27 #include <linux/radix-tree.h>
28 #include <linux/percpu.h>
29 #include <linux/slab.h>
30 #include <linux/kmemleak.h>
31 #include <linux/notifier.h>
32 #include <linux/cpu.h>
33 #include <linux/string.h>
34 #include <linux/bitops.h>
35 #include <linux/rcupdate.h>
36 #include <linux/preempt.h> /* in_interrupt() */
40 * The height_to_maxindex array needs to be one deeper than the maximum
41 * path as height 0 holds only 1 entry.
43 static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;
46 * Radix tree node cache.
48 static struct kmem_cache *radix_tree_node_cachep;
51 * The radix tree is variable-height, so an insert operation not only has
52 * to build the branch to its corresponding item, it also has to build the
53 * branch to existing items if the size has to be increased (by radix_tree_extend).
56 * The worst case is a zero height tree with just a single item at index 0,
57 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
58 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
61 #define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
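/*
 * Worked example (assuming the common 64-bit configuration where
 * RADIX_TREE_MAP_SHIFT is 6): RADIX_TREE_MAX_PATH is DIV_ROUND_UP(64, 6) == 11,
 * so each per-cpu pool preloads 2 * 11 - 1 == 21 nodes for the worst case
 * described above.
 */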
64 * Per-cpu pool of preloaded nodes
66 struct radix_tree_preload {
68 /* nodes->private_data points to next preallocated node */
69 struct radix_tree_node *nodes;
71 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
73 static inline void *ptr_to_indirect(void *ptr)
75 return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
78 static inline void *indirect_to_ptr(void *ptr)
80 return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
83 #define RADIX_TREE_RETRY ptr_to_indirect(NULL)
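/*
 * Illustrative sketch only (relies on RADIX_TREE_INDIRECT_PTR being the low
 * pointer bit, as defined in <linux/radix-tree.h>): for any node pointer
 * "node",
 *
 *	void *slot = ptr_to_indirect(node);
 *
 * makes radix_tree_is_indirect_ptr(slot) true, and
 * indirect_to_ptr(slot) == (void *)node recovers the original pointer.
 */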
85 #ifdef CONFIG_RADIX_TREE_MULTIORDER
86 /* Sibling slots point directly to another slot in the same node */
87 static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
90 return (parent->slots <= ptr) &&
91 (ptr < parent->slots + RADIX_TREE_MAP_SIZE);
94 static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
100 static inline unsigned long get_slot_offset(struct radix_tree_node *parent,
103 return slot - parent->slots;
106 static unsigned radix_tree_descend(struct radix_tree_node *parent,
107 struct radix_tree_node **nodep, unsigned offset)
109 void **entry = rcu_dereference_raw(parent->slots[offset]);
111 #ifdef CONFIG_RADIX_TREE_MULTIORDER
112 if (radix_tree_is_indirect_ptr(entry)) {
113 unsigned long siboff = get_slot_offset(parent, entry);
114 if (siboff < RADIX_TREE_MAP_SIZE) {
116 entry = rcu_dereference_raw(parent->slots[offset]);
121 *nodep = (void *)entry;
125 static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
127 return root->gfp_mask & __GFP_BITS_MASK;
130 static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
133 __set_bit(offset, node->tags[tag]);
136 static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
139 __clear_bit(offset, node->tags[tag]);
142 static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
145 return test_bit(offset, node->tags[tag]);
148 static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
150 root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
153 static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
155 root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
158 static inline void root_tag_clear_all(struct radix_tree_root *root)
160 root->gfp_mask &= __GFP_BITS_MASK;
163 static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
165 return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
168 static inline unsigned root_tags_get(struct radix_tree_root *root)
170 return (__force unsigned)root->gfp_mask >> __GFP_BITS_SHIFT;
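/*
 * In other words, the low __GFP_BITS_SHIFT bits of root->gfp_mask hold the
 * allocation flags, and the bits above them hold one bit per root tag, so
 * for example root_tag_set(root, 0) simply sets bit __GFP_BITS_SHIFT.
 */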
174 * Returns 1 if any slot in the node has this tag set.
175 * Otherwise returns 0.
177 static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
180 for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
181 if (node->tags[tag][idx])
188 * radix_tree_find_next_bit - find the next set bit in a memory region
190 * @addr: The address to base the search on
191 * @size: The bitmap size in bits
192 * @offset: The bitnumber to start searching at
194 * Unrollable variant of find_next_bit() for constant size arrays.
195 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
196 * Returns next bit offset, or size if nothing found.
198 static __always_inline unsigned long
199 radix_tree_find_next_bit(const unsigned long *addr,
200 unsigned long size, unsigned long offset)
202 if (!__builtin_constant_p(size))
203 return find_next_bit(addr, size, offset);
208 addr += offset / BITS_PER_LONG;
209 tmp = *addr >> (offset % BITS_PER_LONG);
211 return __ffs(tmp) + offset;
212 offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
213 while (offset < size) {
216 return __ffs(tmp) + offset;
217 offset += BITS_PER_LONG;
224 static void dump_node(void *slot, int height, int offset)
226 struct radix_tree_node *node;
233 pr_debug("radix entry %p offset %d\n", slot, offset);
237 node = indirect_to_ptr(slot);
238 pr_debug("radix node: %p offset %d tags %lx %lx %lx path %x count %d parent %p\n",
239 slot, offset, node->tags[0][0], node->tags[1][0],
240 node->tags[2][0], node->path, node->count, node->parent);
242 for (i = 0; i < RADIX_TREE_MAP_SIZE; i++)
243 dump_node(node->slots[i], height - 1, i);
247 static void radix_tree_dump(struct radix_tree_root *root)
249 pr_debug("radix root: %p height %d rnode %p tags %x\n",
250 root, root->height, root->rnode,
251 root->gfp_mask >> __GFP_BITS_SHIFT);
252 if (!radix_tree_is_indirect_ptr(root->rnode))
254 dump_node(root->rnode, root->height, 0);
259 * This assumes that the caller has performed appropriate preallocation, and
260 * that the caller has pinned this thread of control to the current CPU.
262 static struct radix_tree_node *
263 radix_tree_node_alloc(struct radix_tree_root *root)
265 struct radix_tree_node *ret = NULL;
266 gfp_t gfp_mask = root_gfp_mask(root);
269 * Preload code isn't irq safe and it doesn't make sense to use
270 * preloading in the interrupt anyway as all the allocations have to
271 * be atomic. So just do normal allocation when in interrupt.
273 if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
274 struct radix_tree_preload *rtp;
277 * Even if the caller has preloaded, try to allocate from the
278 * cache first for the new node to get accounted.
280 ret = kmem_cache_alloc(radix_tree_node_cachep,
281 gfp_mask | __GFP_ACCOUNT | __GFP_NOWARN);
286 * Provided the caller has preloaded here, we will always
287 * succeed in getting a node here (and never reach kmem_cache_alloc).
290 rtp = this_cpu_ptr(&radix_tree_preloads);
293 rtp->nodes = ret->private_data;
294 ret->private_data = NULL;
298 * Update the allocation stack trace as this is more useful for debugging.
301 kmemleak_update_trace(ret);
304 ret = kmem_cache_alloc(radix_tree_node_cachep,
305 gfp_mask | __GFP_ACCOUNT);
307 BUG_ON(radix_tree_is_indirect_ptr(ret));
311 static void radix_tree_node_rcu_free(struct rcu_head *head)
313 struct radix_tree_node *node =
314 container_of(head, struct radix_tree_node, rcu_head);
318 * must only free zeroed nodes into the slab. radix_tree_shrink
319 * can leave us with a non-NULL entry in the first slot, so clear
320 * that here to make sure.
322 for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
323 tag_clear(node, i, 0);
325 node->slots[0] = NULL;
328 kmem_cache_free(radix_tree_node_cachep, node);
332 radix_tree_node_free(struct radix_tree_node *node)
334 call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
338 * Load up this CPU's radix_tree_node buffer with sufficient objects to
339 * ensure that the addition of a single element in the tree cannot fail. On
340 * success, return zero, with preemption disabled. On error, return -ENOMEM
341 * with preemption not disabled.
343 * To make use of this facility, the radix tree must be initialised without
344 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
346 static int __radix_tree_preload(gfp_t gfp_mask)
348 struct radix_tree_preload *rtp;
349 struct radix_tree_node *node;
353 rtp = this_cpu_ptr(&radix_tree_preloads);
354 while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
356 node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
360 rtp = this_cpu_ptr(&radix_tree_preloads);
361 if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
362 node->private_data = rtp->nodes;
366 kmem_cache_free(radix_tree_node_cachep, node);
375 * Load up this CPU's radix_tree_node buffer with sufficient objects to
376 * ensure that the addition of a single element in the tree cannot fail. On
377 * success, return zero, with preemption disabled. On error, return -ENOMEM
378 * with preemption not disabled.
380 * To make use of this facility, the radix tree must be initialised without
381 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
383 int radix_tree_preload(gfp_t gfp_mask)
385 /* Warn on non-sensical use... */
386 WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
387 return __radix_tree_preload(gfp_mask);
389 EXPORT_SYMBOL(radix_tree_preload);
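/*
 * A minimal usage sketch (hypothetical caller; "tree", "lock", "index" and
 * "item" are assumed to be defined by the caller):
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&lock);
 *	err = radix_tree_insert(&tree, index, item);
 *	spin_unlock(&lock);
 *	radix_tree_preload_end();
 *
 * radix_tree_preload_end() re-enables preemption; between the two calls the
 * insertion cannot fail for lack of memory.
 */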
392 * The same as the function above, except that preloading is not guaranteed; it is
393 * done only when it would actually help. On success, return zero with preemption
394 * disabled. On error, return -ENOMEM with preemption not disabled.
396 int radix_tree_maybe_preload(gfp_t gfp_mask)
398 if (gfpflags_allow_blocking(gfp_mask))
399 return __radix_tree_preload(gfp_mask);
400 /* Preloading doesn't help anything with this gfp mask, skip it */
404 EXPORT_SYMBOL(radix_tree_maybe_preload);
407 * Return the maximum key which can be stored into a
408 * radix tree with height HEIGHT.
410 static inline unsigned long radix_tree_maxindex(unsigned int height)
412 return height_to_maxindex[height];
415 static inline unsigned long node_maxindex(struct radix_tree_node *node)
417 return radix_tree_maxindex(node->path & RADIX_TREE_HEIGHT_MASK);
420 static unsigned radix_tree_load_root(struct radix_tree_root *root,
421 struct radix_tree_node **nodep, unsigned long *maxindex)
423 struct radix_tree_node *node = rcu_dereference_raw(root->rnode);
427 if (likely(radix_tree_is_indirect_ptr(node))) {
428 node = indirect_to_ptr(node);
429 *maxindex = node_maxindex(node);
430 return (node->path & RADIX_TREE_HEIGHT_MASK) *
431 RADIX_TREE_MAP_SHIFT;
439 * Extend a radix tree so it can store key @index.
441 static int radix_tree_extend(struct radix_tree_root *root,
444 struct radix_tree_node *node;
445 struct radix_tree_node *slot;
449 /* Figure out what the height should be. */
450 height = root->height + 1;
451 while (index > radix_tree_maxindex(height))
454 if (root->rnode == NULL) {
455 root->height = height;
460 unsigned int newheight;
461 if (!(node = radix_tree_node_alloc(root)))
464 /* Propagate the aggregated tag info into the new root */
465 for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
466 if (root_tag_get(root, tag))
467 tag_set(node, tag, 0);
470 /* Increase the height. */
471 newheight = root->height+1;
472 BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK);
473 node->path = newheight;
477 if (radix_tree_is_indirect_ptr(slot)) {
478 slot = indirect_to_ptr(slot);
480 slot = ptr_to_indirect(slot);
482 node->slots[0] = slot;
483 node = ptr_to_indirect(node);
484 rcu_assign_pointer(root->rnode, node);
485 root->height = newheight;
486 } while (height > root->height);
488 return height * RADIX_TREE_MAP_SHIFT;
492 * __radix_tree_create - create a slot in a radix tree
493 * @root: radix tree root
495 * @order: index occupies 2^order aligned slots
496 * @nodep: returns node
497 * @slotp: returns slot
499 * Create, if necessary, and return the node and slot for an item
500 * at position @index in the radix tree @root.
502 * Until there is more than one item in the tree, no nodes are
503 * allocated and @root->rnode is used as a direct slot instead of
504 * pointing to a node, in which case *@nodep will be NULL.
506 * Returns -ENOMEM, or 0 for success.
508 int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
509 unsigned order, struct radix_tree_node **nodep,
512 struct radix_tree_node *node = NULL, *slot;
513 unsigned long maxindex;
514 unsigned int height, shift, offset;
515 unsigned long max = index | ((1UL << order) - 1);
517 shift = radix_tree_load_root(root, &slot, &maxindex);
519 /* Make sure the tree is high enough. */
520 if (max > maxindex) {
521 int error = radix_tree_extend(root, max);
526 if (order == shift) {
527 shift += RADIX_TREE_MAP_SHIFT;
532 height = root->height;
534 offset = 0; /* uninitialised var warning */
535 while (shift > order) {
537 /* Have to add a child node. */
538 if (!(slot = radix_tree_node_alloc(root)))
543 rcu_assign_pointer(node->slots[offset],
544 ptr_to_indirect(slot));
546 slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT;
548 rcu_assign_pointer(root->rnode,
549 ptr_to_indirect(slot));
550 } else if (!radix_tree_is_indirect_ptr(slot))
553 /* Go a level down */
555 shift -= RADIX_TREE_MAP_SHIFT;
556 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
557 node = indirect_to_ptr(slot);
558 slot = node->slots[offset];
561 #ifdef CONFIG_RADIX_TREE_MULTIORDER
562 /* Insert pointers to the canonical entry */
564 int i, n = 1 << (order - shift);
565 offset = offset & ~(n - 1);
566 slot = ptr_to_indirect(&node->slots[offset]);
567 for (i = 0; i < n; i++) {
568 if (node->slots[offset + i])
572 for (i = 1; i < n; i++) {
573 rcu_assign_pointer(node->slots[offset + i], slot);
582 *slotp = node ? node->slots + offset : (void **)&root->rnode;
587 * __radix_tree_insert - insert into a radix tree
588 * @root: radix tree root
590 * @order: key covers the 2^order indices around index
591 * @item: item to insert
593 * Insert an item into the radix tree at position @index.
595 int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
596 unsigned order, void *item)
598 struct radix_tree_node *node;
602 BUG_ON(radix_tree_is_indirect_ptr(item));
604 error = __radix_tree_create(root, index, order, &node, &slot);
609 rcu_assign_pointer(*slot, item);
612 unsigned offset = get_slot_offset(node, slot);
614 BUG_ON(tag_get(node, 0, offset));
615 BUG_ON(tag_get(node, 1, offset));
616 BUG_ON(tag_get(node, 2, offset));
618 BUG_ON(root_tags_get(root));
623 EXPORT_SYMBOL(__radix_tree_insert);
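/*
 * Note that plain radix_tree_insert() in <linux/radix-tree.h> is simply the
 * order-0 case, i.e. __radix_tree_insert(root, index, 0, item).
 */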
626 * __radix_tree_lookup - lookup an item in a radix tree
627 * @root: radix tree root
629 * @nodep: returns node
630 * @slotp: returns slot
632 * Lookup and return the item at position @index in the radix tree @root.
635 * Until there is more than one item in the tree, no nodes are
636 * allocated and @root->rnode is used as a direct slot instead of
637 * pointing to a node, in which case *@nodep will be NULL.
639 void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
640 struct radix_tree_node **nodep, void ***slotp)
642 struct radix_tree_node *node, *parent;
643 unsigned long maxindex;
649 slot = (void **)&root->rnode;
650 shift = radix_tree_load_root(root, &node, &maxindex);
651 if (index > maxindex)
654 while (radix_tree_is_indirect_ptr(node)) {
657 if (node == RADIX_TREE_RETRY)
659 parent = indirect_to_ptr(node);
660 shift -= RADIX_TREE_MAP_SHIFT;
661 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
662 offset = radix_tree_descend(parent, &node, offset);
663 slot = parent->slots + offset;
674 * radix_tree_lookup_slot - lookup a slot in a radix tree
675 * @root: radix tree root
678 * Returns: the slot corresponding to the position @index in the
679 * radix tree @root. This is useful for update-if-exists operations.
681 * This function can be called under rcu_read_lock iff the slot is not
682 * modified by radix_tree_replace_slot, otherwise it must be called
683 * with exclusion of other writers. Any dereference of the slot must be done
684 * using radix_tree_deref_slot.
686 void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
690 if (!__radix_tree_lookup(root, index, NULL, &slot))
694 EXPORT_SYMBOL(radix_tree_lookup_slot);
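/*
 * A lookup-slot sketch under RCU (hypothetical caller; "tree" and "index"
 * are assumed to exist):
 *
 *	void **slot;
 *	void *item = NULL;
 *
 *	rcu_read_lock();
 *	slot = radix_tree_lookup_slot(&tree, index);
 *	if (slot)
 *		item = radix_tree_deref_slot(slot);
 *	if (radix_tree_deref_retry(item))
 *		item = NULL;	(or restart the lookup)
 *	rcu_read_unlock();
 */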
697 * radix_tree_lookup - perform lookup operation on a radix tree
698 * @root: radix tree root
701 * Lookup the item at the position @index in the radix tree @root.
703 * This function can be called under rcu_read_lock, however the caller
704 * must manage lifetimes of leaf nodes (eg. RCU may also be used to free
705 * them safely). No RCU barriers are required to access or modify the
706 * returned item, however.
708 void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
710 return __radix_tree_lookup(root, index, NULL, NULL);
712 EXPORT_SYMBOL(radix_tree_lookup);
715 * radix_tree_tag_set - set a tag on a radix tree node
716 * @root: radix tree root
720 * Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
721 * corresponding to @index in the radix tree, from
722 * the root all the way down to the leaf node.
724 * Returns the address of the tagged item. Setting a tag on a not-present item is a bug.
727 void *radix_tree_tag_set(struct radix_tree_root *root,
728 unsigned long index, unsigned int tag)
730 unsigned int height, shift;
731 struct radix_tree_node *slot;
733 height = root->height;
734 BUG_ON(index > radix_tree_maxindex(height));
736 slot = indirect_to_ptr(root->rnode);
737 shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
742 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
743 if (!tag_get(slot, tag, offset))
744 tag_set(slot, tag, offset);
745 slot = slot->slots[offset];
746 BUG_ON(slot == NULL);
747 if (!radix_tree_is_indirect_ptr(slot))
749 slot = indirect_to_ptr(slot);
750 shift -= RADIX_TREE_MAP_SHIFT;
754 /* set the root's tag bit */
755 if (slot && !root_tag_get(root, tag))
756 root_tag_set(root, tag);
760 EXPORT_SYMBOL(radix_tree_tag_set);
763 * radix_tree_tag_clear - clear a tag on a radix tree node
764 * @root: radix tree root
768 * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
769 * corresponding to @index in the radix tree. If
770 * this causes the leaf node to have no tags set then clear the tag in the
771 * next-to-leaf node, etc.
773 * Returns the address of the tagged item on success, else NULL; i.e. it
774 * has the same return value and semantics as radix_tree_lookup().
776 void *radix_tree_tag_clear(struct radix_tree_root *root,
777 unsigned long index, unsigned int tag)
779 struct radix_tree_node *node = NULL;
780 struct radix_tree_node *slot = NULL;
781 unsigned int height, shift;
782 int uninitialized_var(offset);
784 height = root->height;
785 if (index > radix_tree_maxindex(height))
788 shift = height * RADIX_TREE_MAP_SHIFT;
794 if (!radix_tree_is_indirect_ptr(slot))
796 slot = indirect_to_ptr(slot);
798 shift -= RADIX_TREE_MAP_SHIFT;
799 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
801 slot = slot->slots[offset];
808 if (!tag_get(node, tag, offset))
810 tag_clear(node, tag, offset);
811 if (any_tag_set(node, tag))
814 index >>= RADIX_TREE_MAP_SHIFT;
815 offset = index & RADIX_TREE_MAP_MASK;
819 /* clear the root's tag bit */
820 if (root_tag_get(root, tag))
821 root_tag_clear(root, tag);
826 EXPORT_SYMBOL(radix_tree_tag_clear);
829 * radix_tree_tag_get - get a tag on a radix tree node
830 * @root: radix tree root
832 * @tag: tag index (< RADIX_TREE_MAX_TAGS)
836 * 0: tag not present or not set
839 * Note that the return value of this function may not be relied on, even if
840 * the RCU lock is held, unless tag modification and node deletion are excluded from concurrency.
843 int radix_tree_tag_get(struct radix_tree_root *root,
844 unsigned long index, unsigned int tag)
846 unsigned int height, shift;
847 struct radix_tree_node *node;
849 /* check the root's tag bit */
850 if (!root_tag_get(root, tag))
853 node = rcu_dereference_raw(root->rnode);
857 if (!radix_tree_is_indirect_ptr(node))
859 node = indirect_to_ptr(node);
861 height = node->path & RADIX_TREE_HEIGHT_MASK;
862 if (index > radix_tree_maxindex(height))
865 shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
872 node = indirect_to_ptr(node);
874 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
875 if (!tag_get(node, tag, offset))
879 node = rcu_dereference_raw(node->slots[offset]);
880 if (!radix_tree_is_indirect_ptr(node))
882 shift -= RADIX_TREE_MAP_SHIFT;
886 EXPORT_SYMBOL(radix_tree_tag_get);
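/*
 * Tag round-trip sketch (hypothetical caller; the item at "index" must
 * already be present, since tagging a missing item is a bug):
 *
 *	radix_tree_tag_set(&tree, index, 1);
 *	WARN_ON(!radix_tree_tag_get(&tree, index, 1));
 *	radix_tree_tag_clear(&tree, index, 1);
 *	WARN_ON(radix_tree_tagged(&tree, 1));	(assuming nothing else has tag 1)
 */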
889 * radix_tree_next_chunk - find next chunk of slots for iteration
891 * @root: radix tree root
892 * @iter: iterator state
893 * @flags: RADIX_TREE_ITER_* flags and tag index
894 * Returns: pointer to chunk first slot, or NULL if iteration is over
896 void **radix_tree_next_chunk(struct radix_tree_root *root,
897 struct radix_tree_iter *iter, unsigned flags)
899 unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
900 struct radix_tree_node *rnode, *node;
901 unsigned long index, offset, height;
903 if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
907 * Catch next_index overflow after ~0UL. iter->index never overflows
908 * during iteration; it can be zero only at the beginning.
909 * And we cannot overflow iter->next_index in a single step,
910 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
912 * This condition is also used by radix_tree_next_slot() to stop
913 * contiguous iteration and to forbid switching to the next chunk.
915 index = iter->next_index;
916 if (!index && iter->index)
919 rnode = rcu_dereference_raw(root->rnode);
920 if (radix_tree_is_indirect_ptr(rnode)) {
921 rnode = indirect_to_ptr(rnode);
922 } else if (rnode && !index) {
923 /* Single-slot tree */
925 iter->next_index = 1;
927 return (void **)&root->rnode;
932 height = rnode->path & RADIX_TREE_HEIGHT_MASK;
933 shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
934 offset = index >> shift;
936 /* Index outside of the tree */
937 if (offset >= RADIX_TREE_MAP_SIZE)
942 struct radix_tree_node *slot;
943 if ((flags & RADIX_TREE_ITER_TAGGED) ?
944 !test_bit(offset, node->tags[tag]) :
945 !node->slots[offset]) {
947 if (flags & RADIX_TREE_ITER_CONTIG)
950 if (flags & RADIX_TREE_ITER_TAGGED)
951 offset = radix_tree_find_next_bit(
956 while (++offset < RADIX_TREE_MAP_SIZE) {
957 if (node->slots[offset])
960 index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
961 index += offset << shift;
962 /* Overflow after ~0UL */
965 if (offset == RADIX_TREE_MAP_SIZE)
969 /* This is a leaf node */
973 slot = rcu_dereference_raw(node->slots[offset]);
976 if (!radix_tree_is_indirect_ptr(slot))
978 node = indirect_to_ptr(slot);
979 shift -= RADIX_TREE_MAP_SHIFT;
980 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
983 /* Update the iterator state */
985 iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;
987 /* Construct iter->tags bit-mask from node->tags[tag] array */
988 if (flags & RADIX_TREE_ITER_TAGGED) {
989 unsigned tag_long, tag_bit;
991 tag_long = offset / BITS_PER_LONG;
992 tag_bit = offset % BITS_PER_LONG;
993 iter->tags = node->tags[tag][tag_long] >> tag_bit;
994 /* This never happens if RADIX_TREE_TAG_LONGS == 1 */
995 if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
996 /* Pick tags from next element */
998 iter->tags |= node->tags[tag][tag_long + 1] <<
999 (BITS_PER_LONG - tag_bit);
1000 /* Clip chunk size, here only BITS_PER_LONG tags */
1001 iter->next_index = index + BITS_PER_LONG;
1005 return node->slots + offset;
1007 EXPORT_SYMBOL(radix_tree_next_chunk);
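/*
 * radix_tree_next_chunk() is rarely called directly; it backs the iterator
 * macros in <linux/radix-tree.h>. A typical RCU-protected walk looks like
 * this (hypothetical caller):
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	rcu_read_lock();
 *	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
 *		void *item = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(item)) {
 *			slot = radix_tree_iter_retry(&iter);
 *			continue;
 *		}
 *		(use item; iter.index gives its index)
 *	}
 *	rcu_read_unlock();
 */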
1010 * radix_tree_range_tag_if_tagged - for each item in given range set given
1011 * tag if item has another tag set
1012 * @root: radix tree root
1013 * @first_indexp: pointer to a starting index of a range to scan
1014 * @last_index: last index of a range to scan
1015 * @nr_to_tag: maximum number items to tag
1016 * @iftag: tag index to test
1017 * @settag: tag index to set if tested tag is set
1019 * This function scans the range of the radix tree from first_index to last_index
1020 * (inclusive). For each item in the range that has iftag set, the function also
1021 * sets settag. The function stops either after tagging nr_to_tag items or
1022 * after reaching last_index.
1024 * The tags must be set from the leaf level only and propagated back up the
1025 * path to the root. We must do this so that we resolve the full path before
1026 * setting any tags on intermediate nodes. If we set tags as we descend, then
1027 * we can get to the leaf node and find that the index that has the iftag
1028 * set is outside the range we are scanning. This results in dangling tags and
1029 * can lead to problems with later tag operations (e.g. livelocks on lookups).
1031 * The function returns the number of leaves where the tag was set and sets
1032 * *first_indexp to the first unscanned index.
1033 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
1034 * be prepared to handle that.
1036 unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
1037 unsigned long *first_indexp, unsigned long last_index,
1038 unsigned long nr_to_tag,
1039 unsigned int iftag, unsigned int settag)
1041 unsigned int height = root->height;
1042 struct radix_tree_node *node = NULL;
1043 struct radix_tree_node *slot;
1045 unsigned long tagged = 0;
1046 unsigned long index = *first_indexp;
1048 last_index = min(last_index, radix_tree_maxindex(height));
1049 if (index > last_index)
1053 if (!root_tag_get(root, iftag)) {
1054 *first_indexp = last_index + 1;
1058 *first_indexp = last_index + 1;
1059 root_tag_set(root, settag);
1063 shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
1064 slot = indirect_to_ptr(root->rnode);
1067 unsigned long upindex;
1070 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
1071 if (!slot->slots[offset])
1073 if (!tag_get(slot, iftag, offset))
1077 slot = slot->slots[offset];
1078 if (radix_tree_is_indirect_ptr(slot)) {
1079 slot = indirect_to_ptr(slot);
1080 shift -= RADIX_TREE_MAP_SHIFT;
1084 node = node->parent;
1089 tagged += 1 << shift;
1090 tag_set(slot, settag, offset);
1092 /* walk back up the path tagging interior nodes */
1095 upindex >>= RADIX_TREE_MAP_SHIFT;
1096 offset = upindex & RADIX_TREE_MAP_MASK;
1098 /* stop if we find a node with the tag already set */
1099 if (tag_get(node, settag, offset))
1101 tag_set(node, settag, offset);
1102 node = node->parent;
1106 * Small optimization: now clear that node pointer.
1107 * Since all of this slot's ancestors now have the tag set
1108 * from setting it above, we have no further need to walk
1109 * back up the tree setting tags, until we update slot to
1110 * point to another radix_tree_node.
1115 /* Go to next item at level determined by 'shift' */
1116 index = ((index >> shift) + 1) << shift;
1117 /* Overflow can happen when last_index is ~0UL... */
1118 if (index > last_index || !index)
1120 if (tagged >= nr_to_tag)
1122 while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
1124 * We've fully scanned this node. Go up. Because
1125 * last_index is guaranteed to be in the tree, what
1126 * we do below cannot wander astray.
1128 slot = slot->parent;
1129 shift += RADIX_TREE_MAP_SHIFT;
1133 * We need not set the root tag if no tag was set with settag anywhere
1134 * within the range from *first_indexp to last_index.
1137 root_tag_set(root, settag);
1138 *first_indexp = index;
1142 EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
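/*
 * This is, for example, the primitive behind tag_pages_for_writeback(),
 * which (roughly, and under mapping->tree_lock; "batch" here stands for the
 * caller's batch size) retags a batch of dirty pages as "towrite":
 *
 *	spin_lock_irq(&mapping->tree_lock);
 *	tagged = radix_tree_range_tag_if_tagged(&mapping->page_tree,
 *			&start, end, batch,
 *			PAGECACHE_TAG_DIRTY, PAGECACHE_TAG_TOWRITE);
 *	spin_unlock_irq(&mapping->tree_lock);
 *
 * Sketch only; the real caller loops over batches and calls cond_resched()
 * between them.
 */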
1145 * radix_tree_gang_lookup - perform multiple lookup on a radix tree
1146 * @root: radix tree root
1147 * @results: where the results of the lookup are placed
1148 * @first_index: start the lookup from this key
1149 * @max_items: place up to this many items at *results
1151 * Performs an index-ascending scan of the tree for present items. Places
1152 * them at *@results and returns the number of items which were placed at
1155 * The implementation is naive.
1157 * Like radix_tree_lookup, radix_tree_gang_lookup may be called under
1158 * rcu_read_lock. In this case, rather than the returned results being
1159 * an atomic snapshot of the tree at a single point in time, the semantics
1160 * of an RCU protected gang lookup are as though multiple radix_tree_lookups
1161 * have each been issued individually, with the results stored in 'results'.
1164 radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
1165 unsigned long first_index, unsigned int max_items)
1167 struct radix_tree_iter iter;
1169 unsigned int ret = 0;
1171 if (unlikely(!max_items))
1174 radix_tree_for_each_slot(slot, root, &iter, first_index) {
1175 results[ret] = rcu_dereference_raw(*slot);
1178 if (radix_tree_is_indirect_ptr(results[ret])) {
1179 slot = radix_tree_iter_retry(&iter);
1182 if (++ret == max_items)
1188 EXPORT_SYMBOL(radix_tree_gang_lookup);
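/*
 * Sketch (hypothetical caller): collect up to 16 items starting at index 0.
 *
 *	void *items[16];
 *	unsigned int n;
 *
 *	rcu_read_lock();
 *	n = radix_tree_gang_lookup(&tree, items, 0, 16);
 *	rcu_read_unlock();
 */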
1191 * radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
1192 * @root: radix tree root
1193 * @results: where the results of the lookup are placed
1194 * @indices: where their indices should be placed (but usually NULL)
1195 * @first_index: start the lookup from this key
1196 * @max_items: place up to this many items at *results
1198 * Performs an index-ascending scan of the tree for present items. Places
1199 * their slots at *@results and returns the number of items which were
1200 * placed at *@results.
1202 * The implementation is naive.
1204 * Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
1205 * be dereferenced with radix_tree_deref_slot, and if using only RCU
1206 * protection, radix_tree_deref_slot may fail requiring a retry.
1209 radix_tree_gang_lookup_slot(struct radix_tree_root *root,
1210 void ***results, unsigned long *indices,
1211 unsigned long first_index, unsigned int max_items)
1213 struct radix_tree_iter iter;
1215 unsigned int ret = 0;
1217 if (unlikely(!max_items))
1220 radix_tree_for_each_slot(slot, root, &iter, first_index) {
1221 results[ret] = slot;
1223 indices[ret] = iter.index;
1224 if (++ret == max_items)
1230 EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
1233 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree based on a tag
1235 * @root: radix tree root
1236 * @results: where the results of the lookup are placed
1237 * @first_index: start the lookup from this key
1238 * @max_items: place up to this many items at *results
1239 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
1241 * Performs an index-ascending scan of the tree for present items which
1242 * have the tag indexed by @tag set. Places the items at *@results and
1243 * returns the number of items which were placed at *@results.
1246 radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
1247 unsigned long first_index, unsigned int max_items,
1250 struct radix_tree_iter iter;
1252 unsigned int ret = 0;
1254 if (unlikely(!max_items))
1257 radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
1258 results[ret] = rcu_dereference_raw(*slot);
1261 if (radix_tree_is_indirect_ptr(results[ret])) {
1262 slot = radix_tree_iter_retry(&iter);
1265 if (++ret == max_items)
1271 EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
1274 * radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
1275 * radix tree based on a tag
1276 * @root: radix tree root
1277 * @results: where the results of the lookup are placed
1278 * @first_index: start the lookup from this key
1279 * @max_items: place up to this many items at *results
1280 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
1282 * Performs an index-ascending scan of the tree for present items which
1283 * have the tag indexed by @tag set. Places the slots at *@results and
1284 * returns the number of slots which were placed at *@results.
1287 radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
1288 unsigned long first_index, unsigned int max_items,
1291 struct radix_tree_iter iter;
1293 unsigned int ret = 0;
1295 if (unlikely(!max_items))
1298 radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
1299 results[ret] = slot;
1300 if (++ret == max_items)
1306 EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
1308 #if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
1309 #include <linux/sched.h> /* for cond_resched() */
1312 * This linear search is at present only useful to shmem_unuse_inode().
1314 static unsigned long __locate(struct radix_tree_node *slot, void *item,
1315 unsigned long index, unsigned long *found_index)
1317 unsigned int shift, height;
1320 height = slot->path & RADIX_TREE_HEIGHT_MASK;
1321 shift = (height-1) * RADIX_TREE_MAP_SHIFT;
1323 for ( ; height > 1; height--) {
1324 i = (index >> shift) & RADIX_TREE_MAP_MASK;
1326 if (slot->slots[i] != NULL)
1328 index &= ~((1UL << shift) - 1);
1329 index += 1UL << shift;
1331 goto out; /* 32-bit wraparound */
1333 if (i == RADIX_TREE_MAP_SIZE)
1337 slot = rcu_dereference_raw(slot->slots[i]);
1340 if (!radix_tree_is_indirect_ptr(slot)) {
1342 *found_index = index + i;
1349 slot = indirect_to_ptr(slot);
1350 shift -= RADIX_TREE_MAP_SHIFT;
1353 /* Bottom level: check items */
1354 for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
1355 if (slot->slots[i] == item) {
1356 *found_index = index + i;
1361 index += RADIX_TREE_MAP_SIZE;
1367 * radix_tree_locate_item - search through radix tree for item
1368 * @root: radix tree root
1369 * @item: item to be found
1371 * Returns index where item was found, or -1 if not found.
1372 * Caller must hold no lock (since this time-consuming function needs
1373 * to be preemptible), and must check afterwards if item is still there.
1375 unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
1377 struct radix_tree_node *node;
1378 unsigned long max_index;
1379 unsigned long cur_index = 0;
1380 unsigned long found_index = -1;
1384 node = rcu_dereference_raw(root->rnode);
1385 if (!radix_tree_is_indirect_ptr(node)) {
1392 node = indirect_to_ptr(node);
1393 max_index = radix_tree_maxindex(node->path &
1394 RADIX_TREE_HEIGHT_MASK);
1395 if (cur_index > max_index) {
1400 cur_index = __locate(node, item, cur_index, &found_index);
1403 } while (cur_index != 0 && cur_index <= max_index);
1408 unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
1412 #endif /* CONFIG_SHMEM && CONFIG_SWAP */
1415 * radix_tree_shrink - shrink the height of a radix tree to the minimum
1416 * @root: radix tree root
1418 static inline void radix_tree_shrink(struct radix_tree_root *root)
1420 /* try to shrink tree height */
1421 while (root->height > 0) {
1422 struct radix_tree_node *to_free = root->rnode;
1423 struct radix_tree_node *slot;
1425 BUG_ON(!radix_tree_is_indirect_ptr(to_free));
1426 to_free = indirect_to_ptr(to_free);
1429 * The candidate node has more than one child, or its child
1430 * is not at the leftmost slot, or it is a multiorder entry, in which case we cannot shrink.
1433 if (to_free->count != 1)
1435 slot = to_free->slots[0];
1438 if (!radix_tree_is_indirect_ptr(slot) && (root->height > 1))
1441 if (radix_tree_is_indirect_ptr(slot)) {
1442 slot = indirect_to_ptr(slot);
1443 slot->parent = NULL;
1444 slot = ptr_to_indirect(slot);
1448 * We don't need rcu_assign_pointer(), since we are simply
1449 * moving the node from one part of the tree to another: if it
1450 * was safe to dereference the old pointer to it
1451 * (to_free->slots[0]), it will be safe to dereference the new
1452 * one (root->rnode) as far as dependent read barriers go.
1458 * We have a dilemma here. The node's slot[0] must not be
1459 * NULLed in case there are concurrent lookups expecting to
1460 * find the item. However if this was a bottom-level node,
1461 * then it may be subject to the slot pointer being visible
1462 * to callers dereferencing it. If item corresponding to
1463 * slot[0] is subsequently deleted, these callers would expect
1464 * their slot to become empty sooner or later.
1466 * For example, lockless pagecache will look up a slot, deref
1467 * the page pointer, and if the page has a refcount of 0 it means it
1468 * was concurrently deleted from pagecache so try the deref
1469 * again. Fortunately there is already a requirement for logic
1470 * to retry the entire slot lookup -- the indirect pointer
1471 * problem (replacing direct root node with an indirect pointer
1472 * also results in a stale slot). So tag the slot as indirect
1473 * to force callers to retry.
1475 if (!radix_tree_is_indirect_ptr(slot))
1476 to_free->slots[0] = RADIX_TREE_RETRY;
1478 radix_tree_node_free(to_free);
1483 * __radix_tree_delete_node - try to free node after clearing a slot
1484 * @root: radix tree root
1485 * @node: node containing @index
1487 * After clearing the slot at @index in @node from radix tree
1488 * rooted at @root, call this function to attempt freeing the
1489 * node and shrinking the tree.
1491 * Returns %true if @node was freed, %false otherwise.
1493 bool __radix_tree_delete_node(struct radix_tree_root *root,
1494 struct radix_tree_node *node)
1496 bool deleted = false;
1499 struct radix_tree_node *parent;
1502 if (node == indirect_to_ptr(root->rnode)) {
1503 radix_tree_shrink(root);
1504 if (root->height == 0)
1510 parent = node->parent;
1512 unsigned int offset;
1514 offset = node->path >> RADIX_TREE_HEIGHT_SHIFT;
1515 parent->slots[offset] = NULL;
1518 root_tag_clear_all(root);
1523 radix_tree_node_free(node);
1532 static inline void delete_sibling_entries(struct radix_tree_node *node,
1533 void *ptr, unsigned offset)
1535 #ifdef CONFIG_RADIX_TREE_MULTIORDER
1537 for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
1538 if (node->slots[offset + i] != ptr)
1540 node->slots[offset + i] = NULL;
1547 * radix_tree_delete_item - delete an item from a radix tree
1548 * @root: radix tree root
1550 * @item: expected item
1552 * Remove @item at @index from the radix tree rooted at @root.
1554 * Returns the address of the deleted item, or NULL if it was not present
1555 * or the entry at the given @index was not @item.
1557 void *radix_tree_delete_item(struct radix_tree_root *root,
1558 unsigned long index, void *item)
1560 struct radix_tree_node *node;
1561 unsigned int offset;
1566 entry = __radix_tree_lookup(root, index, &node, &slot);
1570 if (item && entry != item)
1574 root_tag_clear_all(root);
1579 offset = get_slot_offset(node, slot);
1582 * Clear all tags associated with the item to be deleted.
1583 * Doing it this way would be inefficient, but tags are seldom actually set.
1585 for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
1586 if (tag_get(node, tag, offset))
1587 radix_tree_tag_clear(root, index, tag);
1590 delete_sibling_entries(node, ptr_to_indirect(slot), offset);
1591 node->slots[offset] = NULL;
1594 __radix_tree_delete_node(root, node);
1598 EXPORT_SYMBOL(radix_tree_delete_item);
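/*
 * The @item argument makes conditional deletion race-free under the caller's
 * lock: the entry is removed only if it is still the object the caller
 * expects. Sketch (hypothetical caller):
 *
 *	spin_lock(&lock);
 *	old = radix_tree_delete_item(&tree, index, item);
 *	spin_unlock(&lock);
 *	if (old != item)
 *		(someone else already replaced or removed it)
 */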
1601 * radix_tree_delete - delete an item from a radix tree
1602 * @root: radix tree root
1605 * Remove the item at @index from the radix tree rooted at @root.
1607 * Returns the address of the deleted item, or NULL if it was not present.
1609 void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
1611 return radix_tree_delete_item(root, index, NULL);
1613 EXPORT_SYMBOL(radix_tree_delete);
1616 * radix_tree_tagged - test whether any items in the tree are tagged
1617 * @root: radix tree root
1620 int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
1622 return root_tag_get(root, tag);
1624 EXPORT_SYMBOL(radix_tree_tagged);
1627 radix_tree_node_ctor(void *arg)
1629 struct radix_tree_node *node = arg;
1631 memset(node, 0, sizeof(*node));
1632 INIT_LIST_HEAD(&node->private_list);
1635 static __init unsigned long __maxindex(unsigned int height)
1637 unsigned int width = height * RADIX_TREE_MAP_SHIFT;
1638 int shift = RADIX_TREE_INDEX_BITS - width;
1642 if (shift >= BITS_PER_LONG)
1644 return ~0UL >> shift;
1647 static __init void radix_tree_init_maxindex(void)
1651 for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
1652 height_to_maxindex[i] = __maxindex(i);
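/*
 * With RADIX_TREE_MAP_SHIFT == 6 (the usual 64-bit configuration) this yields,
 * for example, height_to_maxindex[0] == 0, [1] == 0x3f, [2] == 0xfff and
 * [RADIX_TREE_MAX_PATH] == ~0UL.
 */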
1655 static int radix_tree_callback(struct notifier_block *nfb,
1656 unsigned long action,
1659 int cpu = (long)hcpu;
1660 struct radix_tree_preload *rtp;
1661 struct radix_tree_node *node;
1663 /* Free the per-cpu pool of preloaded nodes */
1664 if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
1665 rtp = &per_cpu(radix_tree_preloads, cpu);
1668 rtp->nodes = node->private_data;
1669 kmem_cache_free(radix_tree_node_cachep, node);
1676 void __init radix_tree_init(void)
1678 radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
1679 sizeof(struct radix_tree_node), 0,
1680 SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
1681 radix_tree_node_ctor);
1682 radix_tree_init_maxindex();
1683 hotcpu_notifier(radix_tree_callback, 0);