/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 * Copyright (C) 2016 Intel, Matthew Wilcox
 * Copyright (C) 2016 Intel, Ross Zwisler
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>		/* in_interrupt() */
/* Number of nodes in fully populated tree of given height */
static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
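
/*
 * Illustrative arithmetic only (editor's sketch, assuming a 64-bit kernel
 * with the common RADIX_TREE_MAP_SHIFT of 6): RADIX_TREE_MAX_PATH is
 * DIV_ROUND_UP(64, 6) == 11, so the worst-case insert described above may
 * consume up to 2 * 11 - 1 == 21 preallocated nodes.
 */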
/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	unsigned nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline struct radix_tree_node *entry_to_node(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
}

static inline void *node_to_entry(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
}

#define RADIX_TREE_RETRY	node_to_entry(NULL)

#ifdef CONFIG_RADIX_TREE_MULTIORDER
/* Sibling slots point directly to another slot in the same node */
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	void **ptr = node;
	return (parent->slots <= ptr) &&
			(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
}
#else
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	return false;
}
#endif

static inline unsigned long get_slot_offset(struct radix_tree_node *parent,
						 void **slot)
{
	return slot - parent->slots;
}

static unsigned int radix_tree_descend(struct radix_tree_node *parent,
			struct radix_tree_node **nodep, unsigned long index)
{
	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
	void **entry = rcu_dereference_raw(parent->slots[offset]);

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	if (radix_tree_is_internal_node(entry)) {
		if (is_sibling_entry(parent, entry)) {
			void **sibentry = (void **) entry_to_node(entry);
			offset = get_slot_offset(parent, sibentry);
			entry = rcu_dereference_raw(*sibentry);
		}
	}
#endif

	*nodep = (void *)entry;
	return offset;
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force int)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

static inline unsigned root_tags_get(struct radix_tree_root *root)
{
	return (__force unsigned)root->gfp_mask >> __GFP_BITS_SHIFT;
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	unsigned idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}
/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @node: the node whose tag bitmap is searched
 * @tag: the tag index to search
 * @offset: the bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(struct radix_tree_node *node, unsigned int tag,
			 unsigned long offset)
{
	const unsigned long *addr = node->tags[tag];

	if (offset < RADIX_TREE_MAP_SIZE) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < RADIX_TREE_MAP_SIZE) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return RADIX_TREE_MAP_SIZE;
}

static unsigned int iter_offset(const struct radix_tree_iter *iter)
{
	return (iter->index >> iter_shift(iter)) & RADIX_TREE_MAP_MASK;
}

/*
 * The maximum index which can be stored in a radix tree
 */
static inline unsigned long shift_maxindex(unsigned int shift)
{
	return (RADIX_TREE_MAP_SIZE << shift) - 1;
}

static inline unsigned long node_maxindex(struct radix_tree_node *node)
{
	return shift_maxindex(node->shift);
}
#ifndef __KERNEL__
static void dump_node(struct radix_tree_node *node, unsigned long index)
{
	unsigned long i;

	pr_debug("radix node: %p offset %d indices %lu-%lu parent %p tags %lx %lx %lx shift %d count %d exceptional %d\n",
		node, node->offset, index, index | node_maxindex(node),
		node->parent,
		node->tags[0][0], node->tags[1][0], node->tags[2][0],
		node->shift, node->count, node->exceptional);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		unsigned long first = index | (i << node->shift);
		unsigned long last = first | ((1UL << node->shift) - 1);
		void *entry = node->slots[i];
		if (!entry)
			continue;
		if (entry == RADIX_TREE_RETRY) {
			pr_debug("radix retry offset %ld indices %lu-%lu parent %p\n",
					i, first, last, node);
		} else if (!radix_tree_is_internal_node(entry)) {
			pr_debug("radix entry %p offset %ld indices %lu-%lu parent %p\n",
					entry, i, first, last, node);
		} else if (is_sibling_entry(node, entry)) {
			pr_debug("radix sblng %p offset %ld indices %lu-%lu parent %p val %p\n",
					entry, i, first, last, node,
					*(void **)entry_to_node(entry));
		} else {
			dump_node(entry_to_node(entry), first);
		}
	}
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p rnode %p tags %x\n",
			root, root->rnode,
			root->gfp_mask >> __GFP_BITS_SHIFT);
	if (!radix_tree_is_internal_node(root->rnode))
		return;
	dump_node(entry_to_node(root->rnode), 0);
}
#endif
/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading during an interrupt anyway as all the allocations have
	 * to be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted to the memory
		 * cgroup.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
out:
	BUG_ON(radix_tree_is_internal_node(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);

	/*
	 * Must only free zeroed nodes into the slab. We can be left with
	 * non-NULL entries by radix_tree_free_nodes, so clear the entries
	 * and tags here.
	 */
	memset(node->slots, 0, sizeof(node->slots));
	memset(node->tags, 0, sizeof(node->tags));
	INIT_LIST_HEAD(&node->private_list);

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail. On
 * success, return zero, with preemption disabled. On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask, int nr)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	/*
	 * Nodes preloaded by one cgroup can be used by another cgroup, so
	 * they should never be accounted to any particular memory cgroup.
	 */
	gfp_mask &= ~__GFP_ACCOUNT;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < nr) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < nr) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail. On
 * success, return zero, with preemption disabled. On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on nonsensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
}
EXPORT_SYMBOL(radix_tree_preload);
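
/*
 * A minimal usage sketch (editor's illustration, not from this file):
 * radix_tree_preload() pairs with radix_tree_preload_end() from
 * <linux/radix-tree.h>, which re-enables preemption. The tree, lock and
 * item names below are hypothetical.
 *
 *	error = radix_tree_preload(GFP_KERNEL);
 *	if (!error) {
 *		spin_lock(&my_lock);
 *		error = radix_tree_insert(&my_tree, index, item);
 *		spin_unlock(&my_lock);
 *		radix_tree_preload_end();
 *	}
 */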
/*
 * The same as the function above, except we don't guarantee preloading
 * happens. We do it if we decide it helps. On success, return zero with
 * preemption disabled. On error, return -ENOMEM with preemption not disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);

/*
 * The same as the function above, but preload the number of nodes required
 * to insert (1 << order) contiguous naturally-aligned elements.
 */
int radix_tree_maybe_preload_order(gfp_t gfp_mask, int order)
{
	unsigned long nr_subtrees;
	int nr_nodes, subtree_height;

	/* Preloading doesn't help anything with this gfp mask, skip it */
	if (!gfpflags_allow_blocking(gfp_mask)) {
		preempt_disable();
		return 0;
	}

	/*
	 * Calculate the number and height of fully populated subtrees it
	 * takes to store (1 << order) elements.
	 */
	nr_subtrees = 1 << order;
	for (subtree_height = 0; nr_subtrees > RADIX_TREE_MAP_SIZE;
			subtree_height++)
		nr_subtrees >>= RADIX_TREE_MAP_SHIFT;

	/*
	 * The worst case is a zero height tree with a single item at index 0
	 * and then inserting items starting at ULONG_MAX - (1 << order).
	 *
	 * This requires RADIX_TREE_MAX_PATH nodes to build the branch from
	 * the root to the 0-index item.
	 */
	nr_nodes = RADIX_TREE_MAX_PATH;

	/* Plus branch to fully populated subtrees. */
	nr_nodes += RADIX_TREE_MAX_PATH - subtree_height;

	/* Root node is shared. */
	nr_nodes--;

	/* Plus nodes required to build subtrees. */
	nr_nodes += nr_subtrees * height_to_maxnodes[subtree_height];

	return __radix_tree_preload(gfp_mask, nr_nodes);
}
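
/*
 * Worked example (editor's sketch; assumes a 64-bit kernel with
 * RADIX_TREE_MAP_SHIFT == 6, so RADIX_TREE_MAX_PATH == 11): for order == 9,
 * nr_subtrees starts at 512; one loop iteration leaves 8 subtrees of
 * height 1, so
 *
 *	nr_nodes = 11 + (11 - 1) - 1 + 8 * height_to_maxnodes[1]
 *	         = 20 + 8 * 1 = 28 preloaded nodes.
 */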
static unsigned radix_tree_load_root(struct radix_tree_root *root,
		struct radix_tree_node **nodep, unsigned long *maxindex)
{
	struct radix_tree_node *node = rcu_dereference_raw(root->rnode);

	*nodep = node;

	if (likely(radix_tree_is_internal_node(node))) {
		node = entry_to_node(node);
		*maxindex = node_maxindex(node);
		return node->shift + RADIX_TREE_MAP_SHIFT;
	}

	*maxindex = 0;
	return 0;
}

/*
 * Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root,
				unsigned long index, unsigned int shift)
{
	struct radix_tree_node *slot;
	unsigned int maxshift;
	int tag;

	/* Figure out what the shift should be. */
	maxshift = shift;
	while (index > shift_maxindex(maxshift))
		maxshift += RADIX_TREE_MAP_SHIFT;

	slot = root->rnode;
	if (!slot)
		goto out;

	do {
		struct radix_tree_node *node = radix_tree_node_alloc(root);

		if (!node)
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		BUG_ON(shift > BITS_PER_LONG);
		node->shift = shift;
		node->offset = 0;
		node->count = 1;
		node->parent = NULL;
		if (radix_tree_is_internal_node(slot)) {
			entry_to_node(slot)->parent = node;
		} else {
			/* Moving an exceptional root->rnode to a node */
			if (radix_tree_exceptional_entry(slot))
				node->exceptional = 1;
		}
		node->slots[0] = slot;
		slot = node_to_entry(node);
		rcu_assign_pointer(root->rnode, slot);
		shift += RADIX_TREE_MAP_SHIFT;
	} while (shift <= maxshift);
out:
	return maxshift + RADIX_TREE_MAP_SHIFT;
}

/**
 *	radix_tree_shrink    -    shrink radix tree to minimum height
 *	@root:		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root,
				     radix_tree_update_node_t update_node,
				     void *private)
{
	for (;;) {
		struct radix_tree_node *node = root->rnode;
		struct radix_tree_node *child;

		if (!radix_tree_is_internal_node(node))
			break;
		node = entry_to_node(node);

		/*
		 * If the candidate node has more than one child, or its
		 * child is not at the leftmost slot, or the child is a
		 * multiorder entry, we cannot shrink.
		 */
		if (node->count != 1)
			break;
		child = node->slots[0];
		if (!child)
			break;
		if (!radix_tree_is_internal_node(child) && node->shift)
			break;

		if (radix_tree_is_internal_node(child))
			entry_to_node(child)->parent = NULL;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (node->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		root->rnode = child;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If the item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page has 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing the direct root node with an indirect
		 * pointer also results in a stale slot). So tag the slot as
		 * indirect to force callers to retry.
		 */
		node->count = 0;
		if (!radix_tree_is_internal_node(child)) {
			node->slots[0] = RADIX_TREE_RETRY;
			if (update_node)
				update_node(node, private);
		}

		radix_tree_node_free(node);
	}
}

static void delete_node(struct radix_tree_root *root,
			struct radix_tree_node *node,
			radix_tree_update_node_t update_node, void *private)
{
	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == entry_to_node(root->rnode))
				radix_tree_shrink(root, update_node, private);
			return;
		}

		parent = node->parent;
		if (parent) {
			parent->slots[node->offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->rnode = NULL;
		}

		radix_tree_node_free(node);

		node = parent;
	} while (node);
}
/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		index occupies 2^order aligned slots
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void ***slotp)
{
	struct radix_tree_node *node = NULL, *child;
	void **slot = (void **)&root->rnode;
	unsigned long maxindex;
	unsigned int shift, offset = 0;
	unsigned long max = index | ((1UL << order) - 1);

	shift = radix_tree_load_root(root, &child, &maxindex);

	/* Make sure the tree is high enough. */
	if (order > 0 && max == ((1UL << order) - 1))
		max++;
	if (max > maxindex) {
		int error = radix_tree_extend(root, max, shift);
		if (error < 0)
			return error;
		shift = error;
		child = root->rnode;
	}

	while (shift > order) {
		shift -= RADIX_TREE_MAP_SHIFT;
		if (child == NULL) {
			/* Have to add a child node. */
			child = radix_tree_node_alloc(root);
			if (!child)
				return -ENOMEM;
			child->shift = shift;
			child->offset = offset;
			child->count = 0;
			child->exceptional = 0;
			child->parent = node;
			rcu_assign_pointer(*slot, node_to_entry(child));
			if (node)
				node->count++;
		} else if (!radix_tree_is_internal_node(child))
			break;

		/* Go a level down */
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);
		slot = &node->slots[offset];
	}

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = slot;
	return 0;
}
#ifdef CONFIG_RADIX_TREE_MULTIORDER
/*
 * Free any nodes below this node.  The tree is presumed to not need
 * shrinking, and any user data in the tree is presumed to not need a
 * destructor called on it.  If we need to add a destructor, we can
 * add that functionality later.  Note that we may not clear tags or
 * slots from the tree as an RCU walker may still have a pointer into
 * this subtree.  We could replace the entries with RADIX_TREE_RETRY,
 * but we'll still have to clear those in rcu_free.
 */
static void radix_tree_free_nodes(struct radix_tree_node *node)
{
	unsigned offset = 0;
	struct radix_tree_node *child = entry_to_node(node);

	for (;;) {
		void *entry = child->slots[offset];
		if (radix_tree_is_internal_node(entry) &&
					!is_sibling_entry(child, entry)) {
			child = entry_to_node(entry);
			offset = 0;
			continue;
		}
		offset++;
		while (offset == RADIX_TREE_MAP_SIZE) {
			struct radix_tree_node *old = child;
			offset = child->offset + 1;
			child = child->parent;
			radix_tree_node_free(old);
			if (old == entry_to_node(node))
				return;
		}
	}
}

static inline int insert_entries(struct radix_tree_node *node, void **slot,
				void *item, unsigned order, bool replace)
{
	struct radix_tree_node *child;
	unsigned i, n, tag, offset, tags = 0;

	if (node) {
		n = 1 << (order - node->shift);
		offset = get_slot_offset(node, slot);
	} else {
		n = 1;
		offset = 0;
	}

	if (n > 1) {
		offset = offset & ~(n - 1);
		slot = &node->slots[offset];
	}
	child = node_to_entry(slot);

	for (i = 0; i < n; i++) {
		if (slot[i]) {
			if (replace) {
				node->count--;
				for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
					if (tag_get(node, tag, offset + i))
						tags |= 1 << tag;
			} else
				return -EEXIST;
		}
	}

	for (i = 0; i < n; i++) {
		struct radix_tree_node *old = slot[i];
		if (i) {
			rcu_assign_pointer(slot[i], child);
			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
				if (tags & (1 << tag))
					tag_clear(node, tag, offset + i);
		} else {
			rcu_assign_pointer(slot[i], item);
			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
				if (tags & (1 << tag))
					tag_set(node, tag, offset);
		}
		if (radix_tree_is_internal_node(old) &&
					!is_sibling_entry(node, old))
			radix_tree_free_nodes(old);
		if (radix_tree_exceptional_entry(old))
			node->exceptional--;
	}
	if (node) {
		node->count += n;
		if (radix_tree_exceptional_entry(item))
			node->exceptional += n;
	}
	return n;
}
#else
static inline int insert_entries(struct radix_tree_node *node, void **slot,
				void *item, unsigned order, bool replace)
{
	if (*slot)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);
	if (node) {
		node->count++;
		if (radix_tree_exceptional_entry(item))
			node->exceptional++;
	}
	return 1;
}
#endif
/**
 *	__radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		key covers the 2^order indices around index
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;

	error = insert_entries(node, slot, item, order, false);
	if (error < 0)
		return error;

	if (node) {
		unsigned offset = get_slot_offset(node, slot);
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
		BUG_ON(tag_get(node, 2, offset));
	} else {
		BUG_ON(root_tags_get(root));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);
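
/*
 * A minimal sketch (editor's illustration, not from this file): order-0
 * insertion normally goes through the radix_tree_insert() wrapper in
 * <linux/radix-tree.h>, which passes order == 0 to __radix_tree_insert().
 * The tree name, item and handler below are hypothetical.
 *
 *	RADIX_TREE(my_tree, GFP_KERNEL);
 *
 *	err = radix_tree_insert(&my_tree, 42UL, item);
 *	if (err == -EEXIST)
 *		handle_already_present();
 */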
/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	void **slot;

 restart:
	parent = NULL;
	slot = (void **)&root->rnode;
	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		if (node == RADIX_TREE_RETRY)
			goto restart;
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		slot = parent->slots + offset;
	}

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}

/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
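
/*
 * Sketch of the deref-and-retry pattern the comment above refers to
 * (editor's illustration; assumes an RCU reader and a hypothetical tree):
 *
 *	rcu_read_lock();
 * repeat:
 *	slot = radix_tree_lookup_slot(&my_tree, index);
 *	if (slot) {
 *		item = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(item))
 *			goto repeat;
 *	}
 *	rcu_read_unlock();
 */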
/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);

static void replace_slot(struct radix_tree_root *root,
			 struct radix_tree_node *node,
			 void **slot, void *item,
			 bool warn_typeswitch)
{
	void *old = rcu_dereference_raw(*slot);
	int count, exceptional;

	WARN_ON_ONCE(radix_tree_is_internal_node(item));

	count = !!item - !!old;
	exceptional = !!radix_tree_exceptional_entry(item) -
		      !!radix_tree_exceptional_entry(old);

	WARN_ON_ONCE(warn_typeswitch && (count || exceptional));

	if (node) {
		node->count += count;
		node->exceptional += exceptional;
	}

	rcu_assign_pointer(*slot, item);
}

/**
 * __radix_tree_replace		- replace item in a slot
 * @root:		radix tree root
 * @node:		pointer to tree node
 * @slot:		pointer to slot in @node
 * @item:		new item to store in the slot.
 * @update_node:	callback for changing leaf nodes
 * @private:		private data to pass to @update_node
 *
 * For use with __radix_tree_lookup().  Caller must hold tree write locked
 * across slot lookup and replacement.
 */
void __radix_tree_replace(struct radix_tree_root *root,
			  struct radix_tree_node *node,
			  void **slot, void *item,
			  radix_tree_update_node_t update_node, void *private)
{
	/*
	 * This function supports replacing exceptional entries and
	 * deleting entries, but that needs accounting against the
	 * node unless the slot is root->rnode.
	 */
	replace_slot(root, node, slot, item,
		     !node && slot != (void **)&root->rnode);

	if (!node)
		return;

	if (update_node)
		update_node(node, private);

	delete_node(root, node, update_node, private);
}

/**
 * radix_tree_replace_slot	- replace item in a slot
 * @root:	radix tree root
 * @slot:	pointer to slot
 * @item:	new item to store in the slot.
 *
 * For use with radix_tree_lookup_slot(), radix_tree_gang_lookup_slot(),
 * radix_tree_gang_lookup_tag_slot().  Caller must hold tree write locked
 * across slot lookup and replacement.
 *
 * NOTE: This cannot be used to switch between non-entries (empty slots),
 * regular entries, and exceptional entries, as that requires accounting
 * inside the radix tree node. When switching from one type of entry or
 * deleting, use __radix_tree_lookup() and __radix_tree_replace().
 */
void radix_tree_replace_slot(struct radix_tree_root *root,
			     void **slot, void *item)
{
	replace_slot(root, NULL, slot, item, true);
}
EXPORT_SYMBOL(radix_tree_replace_slot);
#ifdef CONFIG_RADIX_TREE_MULTIORDER
/**
 * radix_tree_join - replace multiple entries with one multiorder entry
 * @root: radix tree root
 * @index: an index inside the new entry
 * @order: order of the new entry
 * @item: new entry
 *
 * Call this function to replace several entries with one larger entry.
 * The existing entries are presumed to not need freeing as a result of
 * this call.
 *
 * The replacement entry will have all the tags set on it that were set
 * on any of the entries it is replacing.
 */
int radix_tree_join(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (!error)
		error = insert_entries(node, slot, item, order, true);
	if (error > 0)
		error = 0;

	return error;
}
#endif
/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  From
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	radix_tree_load_root(root, &node, &maxindex);
	BUG_ON(index > maxindex);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		BUG_ON(!node);

		if (!tag_get(parent, tag, offset))
			tag_set(parent, tag, offset);
	}

	/* set the root's tag bit */
	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_set);
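
/*
 * Illustrative tag round-trip (editor's sketch, not from this file): tag
 * numbers are defined by callers, e.g. the page cache's
 * PAGECACHE_TAG_DIRTY; plain 0 is used here and the tree name and handler
 * are hypothetical.
 *
 *	radix_tree_tag_set(&my_tree, index, 0);
 *	if (radix_tree_tag_get(&my_tree, index, 0))
 *		handle_tagged_entry();
 *	radix_tree_tag_clear(&my_tree, index, 0);
 */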
static void node_tag_clear(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (!tag_get(node, tag, offset))
			return;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			return;

		offset = node->offset;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);
}

static void node_tag_set(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (tag_get(node, tag, offset))
			break;
		tag_set(node, tag, offset);
		offset = node->offset;
		node = node->parent;
	}

	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);
}

/**
 * radix_tree_iter_tag_set - set a tag on the current iterator entry
 * @root:	radix tree root
 * @iter:	iterator state
 * @tag:	tag to set
 */
void radix_tree_iter_tag_set(struct radix_tree_root *root,
			const struct radix_tree_iter *iter, unsigned int tag)
{
	node_tag_set(root, iter->node, tag, iter_offset(iter));
}

/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If this causes
 *	the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	int uninitialized_var(offset);

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	parent = NULL;

	while (radix_tree_is_internal_node(node)) {
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
	}

	if (node)
		node_tag_clear(root, parent, tag, offset);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	if (!root_tag_get(root, tag))
		return 0;

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return 0;
	if (node == NULL)
		return 0;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);

		if (!node)
			return 0;
		if (!tag_get(parent, tag, offset))
			return 0;
		if (node == RADIX_TREE_RETRY)
			break;
	}

	return 1;
}
EXPORT_SYMBOL(radix_tree_tag_get);
static inline void __set_iter_shift(struct radix_tree_iter *iter,
					unsigned int shift)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	iter->shift = shift;
#endif
}

/* Construct iter->tags bit-mask from node->tags[tag] array */
static void set_iter_tags(struct radix_tree_iter *iter,
				struct radix_tree_node *node, unsigned offset,
				unsigned tag)
{
	unsigned tag_long = offset / BITS_PER_LONG;
	unsigned tag_bit  = offset % BITS_PER_LONG;

	iter->tags = node->tags[tag][tag_long] >> tag_bit;

	/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
	if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
		/* Pick tags from next element */
		if (tag_bit)
			iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
		/* Clip chunk size, here only BITS_PER_LONG tags */
		iter->next_index = __radix_tree_iter_add(iter, BITS_PER_LONG);
	}
}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
static void **skip_siblings(struct radix_tree_node **nodep,
			void **slot, struct radix_tree_iter *iter)
{
	void *sib = node_to_entry(slot - 1);

	while (iter->index < iter->next_index) {
		*nodep = rcu_dereference_raw(*slot);
		if (*nodep && *nodep != sib)
			return slot;
		slot++;
		iter->index = __radix_tree_iter_add(iter, 1);
	}

	*nodep = NULL;
	return NULL;
}

void **__radix_tree_next_slot(void **slot, struct radix_tree_iter *iter,
					unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node = rcu_dereference_raw(*slot);

	slot = skip_siblings(&node, slot, iter);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;
		unsigned long next_index;

		if (node == RADIX_TREE_RETRY)
			return slot;
		node = entry_to_node(node);
		iter->node = node;
		iter->shift = node->shift;

		if (flags & RADIX_TREE_ITER_TAGGED) {
			offset = radix_tree_find_next_bit(node, tag, 0);
			if (offset == RADIX_TREE_MAP_SIZE)
				return NULL;
			slot = &node->slots[offset];
			iter->index = __radix_tree_iter_add(iter, offset);
			set_iter_tags(iter, node, offset, tag);
			node = rcu_dereference_raw(*slot);
		} else {
			offset = 0;
			slot = &node->slots[0];
			for (;;) {
				node = rcu_dereference_raw(*slot);
				if (node)
					break;
				slot++;
				offset++;
				if (offset == RADIX_TREE_MAP_SIZE)
					return NULL;
			}
			iter->index = __radix_tree_iter_add(iter, offset);
		}
		if ((flags & RADIX_TREE_ITER_CONTIG) && (offset > 0))
			goto none;
		next_index = (iter->index | shift_maxindex(iter->shift)) + 1;
		if (next_index < iter->next_index)
			iter->next_index = next_index;
	}

	return slot;
 none:
	iter->next_index = 0;
	return NULL;
}
EXPORT_SYMBOL(__radix_tree_next_slot);
#else
static void **skip_siblings(struct radix_tree_node **nodep,
			void **slot, struct radix_tree_iter *iter)
{
	return slot;
}
#endif

void **radix_tree_iter_resume(void **slot, struct radix_tree_iter *iter)
{
	struct radix_tree_node *node;

	slot++;
	iter->index = __radix_tree_iter_add(iter, 1);
	node = rcu_dereference_raw(*slot);
	skip_siblings(&node, slot, iter);
	iter->next_index = iter->index;
	iter->tags = 0;
	return NULL;
}
EXPORT_SYMBOL(radix_tree_iter_resume);
/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node, *child;
	unsigned long index, offset, maxindex;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

 restart:
	radix_tree_load_root(root, &child, &maxindex);
	if (index > maxindex)
		return NULL;
	if (!child)
		return NULL;

	if (!radix_tree_is_internal_node(child)) {
		/* Single-slot tree */
		iter->index = index;
		iter->next_index = maxindex + 1;
		iter->tags = 1;
		iter->node = NULL;
		__set_iter_shift(iter, 0);
		return (void **)&root->rnode;
	}

	do {
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);

		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!tag_get(node, tag, offset) : !child) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(node, tag,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					void *slot = node->slots[offset];
					if (is_sibling_entry(node, slot))
						continue;
					if (slot)
						break;
				}
			index &= ~node_maxindex(node);
			index += offset << node->shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
			child = rcu_dereference_raw(node->slots[offset]);
		}

		if ((child == NULL) || (child == RADIX_TREE_RETRY))
			goto restart;
	} while (radix_tree_is_internal_node(child));

	/* Update the iterator state */
	iter->index = (index & ~node_maxindex(node)) | (offset << node->shift);
	iter->next_index = (index | node_maxindex(node)) + 1;
	iter->node = node;
	__set_iter_shift(iter, node->shift);

	if (flags & RADIX_TREE_ITER_TAGGED)
		set_iter_tags(iter, node, offset, tag);

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
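
/*
 * radix_tree_next_chunk() is normally driven through the iterator macros
 * in <linux/radix-tree.h>; a minimal sketch (editor's illustration with
 * hypothetical tree and handler names):
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	rcu_read_lock();
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
 *		void *item = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(item)) {
 *			slot = radix_tree_iter_retry(&iter);
 *			continue;
 *		}
 *		process(iter.index, item);
 *	}
 *	rcu_read_unlock();
 */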
/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the
 *	semantics of an RCU protected gang lookup are as though multiple
 *	radix_tree_lookups have been issued in individual locks, and results
 *	stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
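
/*
 * Minimal sketch (editor's illustration, hypothetical names): fetch up to
 * 16 present items starting from index 0; the return value is the number
 * actually found.
 *
 *	void *items[16];
 *	unsigned int found;
 *
 *	rcu_read_lock();
 *	found = radix_tree_gang_lookup(&my_tree, items, 0, 16);
 *	rcu_read_unlock();
 */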
/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *				     based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 */
void __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	delete_node(root, node, NULL, NULL);
}

static inline void delete_sibling_entries(struct radix_tree_node *node,
					void *ptr, unsigned offset)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	int i;
	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		node->slots[offset + i] = NULL;
		node->count--;
	}
#endif
}

/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = get_slot_offset(node, slot);

	/* Clear all tags associated with the item to be deleted. */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
		node_tag_clear(root, node, tag, offset);

	delete_sibling_entries(node, node_to_entry(slot), offset);
	__radix_tree_replace(root, node, slot, NULL, NULL, NULL);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);
/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
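
/*
 * Sketch (editor's illustration, hypothetical names): the _item variant
 * above allows conditional deletion, which closes a lookup/delete race --
 * the entry is only removed if it still matches the expected item:
 *
 *	if (radix_tree_delete_item(&my_tree, index, item) == item)
 *		free_my_item(item);
 */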
void radix_tree_clear_tags(struct radix_tree_root *root,
			   struct radix_tree_node *node,
			   void **slot)
{
	if (node) {
		unsigned int tag, offset = get_slot_offset(node, slot);
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			node_tag_clear(root, node, tag, offset);
	} else {
		/* Clear root node tags */
		root->gfp_mask &= __GFP_BITS_MASK;
	}
}

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}
static __init void radix_tree_init_maxnodes(void)
{
	unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1];
	unsigned int i, j;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
	for (i = 0; i < ARRAY_SIZE(height_to_maxnodes); i++) {
		for (j = i; j > 0; j--)
			height_to_maxnodes[i] += height_to_maxindex[j - 1] + 1;
	}
}
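
/*
 * Illustrative values (editor's sketch; assumes 64-bit and
 * RADIX_TREE_MAP_SHIFT == 6): a fully populated height-1 tree covers
 * indices 0-63 with a single node, so height_to_maxnodes[1] == 1;
 * height 2 covers indices 0-4095 and needs at most 1 + 64 == 65 nodes.
 */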
static int radix_tree_cpu_dead(unsigned int cpu)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	rtp = &per_cpu(radix_tree_preloads, cpu);
	while (rtp->nr) {
		node = rtp->nodes;
		rtp->nodes = node->private_data;
		kmem_cache_free(radix_tree_node_cachep, node);
		rtp->nr--;
	}
	return 0;
}

void __init radix_tree_init(void)
{
	int ret;

	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxnodes();
	ret = cpuhp_setup_state_nocalls(CPUHP_RADIX_DEAD, "lib/radix:dead",
					NULL, radix_tree_cpu_dead);
	WARN_ON(ret < 0);
}