radix-tree: add radix_tree_split_preload()
author    Matthew Wilcox <willy@linux.intel.com>    Wed, 14 Dec 2016 23:09:04 +0000 (15:09 -0800)
committer Linus Torvalds <torvalds@linux-foundation.org>    Thu, 15 Dec 2016 00:04:10 +0000 (16:04 -0800)
Calculate how many nodes we need to allocate to split an old_order entry
into multiple entries, each of size new_order.  The test suite checks
that we allocated exactly the right number of nodes; neither too many
(checked by rtp->nr == 0), nor too few (checked by comparing
nr_allocated before and after the call to radix_tree_split()).
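
As a worked example (not part of this commit), the node count can be
checked in userspace; this sketch assumes RADIX_TREE_MAP_SHIFT == 6,
the usual configuration, so RADIX_TREE_MAP_SIZE == 64:

  /* Standalone sketch of the arithmetic in radix_tree_split_preload() */
  #include <assert.h>

  #define MAP_SHIFT 6
  #define MAP_SIZE  (1 << MAP_SHIFT)

  static unsigned split_preload_count(unsigned old_order, unsigned new_order)
  {
          unsigned top = 1 << (old_order % MAP_SHIFT);
          unsigned layers = (old_order / MAP_SHIFT) -
                                  (new_order / MAP_SHIFT);
          unsigned nr = 0;

          while (layers--)
                  nr = nr * MAP_SIZE + 1;
          return top * nr;
  }

  int main(void)
  {
          /* An order-9 entry spans 1 << (9 % 6) == 8 slots at shift 6;
           * splitting it to order 0 needs one shift-0 node per slot. */
          assert(split_preload_count(9, 0) == 8);
          /* An order-12 entry fills one slot at shift 12; splitting to
           * order 0 needs a shift-6 node plus its 64 shift-0 children. */
          assert(split_preload_count(12, 0) == 65);
          /* Old and new order in the same layer: no new nodes needed. */
          assert(split_preload_count(4, 1) == 0);
          return 0;
  }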

Link: http://lkml.kernel.org/r/1480369871-5271-60-git-send-email-mawilcox@linuxonhyperv.com
Signed-off-by: Matthew Wilcox <willy@linux.intel.com>
Tested-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Cc: Konstantin Khlebnikov <koct9i@gmail.com>
Cc: Ross Zwisler <ross.zwisler@linux.intel.com>
Cc: Matthew Wilcox <mawilcox@microsoft.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
include/linux/radix-tree.h
lib/radix-tree.c
tools/testing/radix-tree/multiorder.c
tools/testing/radix-tree/test.h

index 1f4b56120de8ad21e54d4b6de73f65458d8b5b68..5dea8f6440e44f8d7345289847deaa9e9434e136 100644 (file)
@@ -345,6 +345,7 @@ static inline void radix_tree_preload_end(void)
        preempt_enable();
 }
 
+int radix_tree_split_preload(unsigned old_order, unsigned new_order, gfp_t);
 int radix_tree_split(struct radix_tree_root *, unsigned long index,
                        unsigned new_order);
 int radix_tree_join(struct radix_tree_root *, unsigned long index,
index ade2ed3f5190ce6a25a4a6baec9a59f5f33a6b15..be1183e62590fd8cd739224dcebc06f2dc6fd102 100644 (file)
@@ -368,7 +368,7 @@ radix_tree_node_free(struct radix_tree_node *node)
  * To make use of this facility, the radix tree must be initialised without
  * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
  */
-static int __radix_tree_preload(gfp_t gfp_mask, int nr)
+static int __radix_tree_preload(gfp_t gfp_mask, unsigned nr)
 {
        struct radix_tree_preload *rtp;
        struct radix_tree_node *node;
@@ -434,6 +434,28 @@ int radix_tree_maybe_preload(gfp_t gfp_mask)
 }
 EXPORT_SYMBOL(radix_tree_maybe_preload);
 
+#ifdef CONFIG_RADIX_TREE_MULTIORDER
+/*
+ * Preload with enough objects to ensure that we can split a single entry
+ * of order @old_order into many entries of size @new_order
+ */
+int radix_tree_split_preload(unsigned int old_order, unsigned int new_order,
+                                                       gfp_t gfp_mask)
+{
+       unsigned top = 1 << (old_order % RADIX_TREE_MAP_SHIFT);
+       unsigned layers = (old_order / RADIX_TREE_MAP_SHIFT) -
+                               (new_order / RADIX_TREE_MAP_SHIFT);
+       unsigned nr = 0;
+
+       WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
+       BUG_ON(new_order >= old_order);
+
+       while (layers--)
+               nr = nr * RADIX_TREE_MAP_SIZE + 1;
+       return __radix_tree_preload(gfp_mask, top * nr);
+}
+#endif
+
 /*
  * The same as function above, but preload number of nodes required to insert
  * (1 << order) continuous naturally-aligned elements.
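
For reference, the caller is expected to bracket the split and the
slot-replacement loop with the new preload and radix_tree_preload_end(),
as the updated test below does. A minimal sketch of that calling
convention (error handling trimmed; new_item() is a placeholder for the
caller's own entry allocator, not a kernel API):

  static int shrink_entry(struct radix_tree_root *root, unsigned long index,
                          unsigned old_order, unsigned new_order)
  {
          struct radix_tree_iter iter;
          void **slot;
          int err;

          err = radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
          if (err)
                  return err;     /* preemption not disabled on failure */
          err = radix_tree_split(root, index, new_order);
          if (err)
                  goto out;
          radix_tree_for_each_slot(slot, root, &iter, index)
                  radix_tree_iter_replace(root, &iter, slot,
                                  new_item(iter.index, new_order));
  out:
          radix_tree_preload_end();       /* re-enables preemption */
          return err;
  }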
index fa6effe997a3da99daa9c529156f1d50ae6f8195..5421f015f46c0bd4dce6f7b14c508a1d324a4570 100644 (file)
@@ -389,35 +389,67 @@ static void multiorder_join(void)
        }
 }
 
+static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
+{
+       struct radix_tree_preload *rtp = &radix_tree_preloads;
+       if (rtp->nr != 0)
+               printf("split(%u %u) remaining %u\n", old_order, new_order,
+                                                       rtp->nr);
+       /*
+        * Can't check for equality here as some nodes may have been
+        * RCU-freed while we ran.  But we should never finish with more
+        * nodes allocated since they should have all been preloaded.
+        */
+       if (nr_allocated > alloc)
+               printf("split(%u %u) allocated %u %u\n", old_order, new_order,
+                                                       alloc, nr_allocated);
+}
+
 static void __multiorder_split(int old_order, int new_order)
 {
-       RADIX_TREE(tree, GFP_KERNEL);
+       RADIX_TREE(tree, GFP_ATOMIC);
        void **slot;
        struct radix_tree_iter iter;
        struct radix_tree_node *node;
        void *item;
+       unsigned alloc;
+
+       radix_tree_preload(GFP_KERNEL);
+       assert(item_insert_order(&tree, 0, old_order) == 0);
+       radix_tree_preload_end();
+
+       /* Wipe out the preloaded cache or it'll confuse check_mem() */
+       radix_tree_cpu_dead(0);
 
-       item_insert_order(&tree, 0, old_order);
        radix_tree_tag_set(&tree, 0, 2);
+
+       radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
+       alloc = nr_allocated;
        radix_tree_split(&tree, 0, new_order);
+       check_mem(old_order, new_order, alloc);
        radix_tree_for_each_slot(slot, &tree, &iter, 0) {
                radix_tree_iter_replace(&tree, &iter, slot,
                                        item_create(iter.index, new_order));
        }
+       radix_tree_preload_end();
 
        item_kill_tree(&tree);
 
+       radix_tree_preload(GFP_KERNEL);
        __radix_tree_insert(&tree, 0, old_order, (void *)0x12);
+       radix_tree_preload_end();
 
        item = __radix_tree_lookup(&tree, 0, &node, NULL);
        assert(item == (void *)0x12);
        assert(node->exceptional > 0);
 
+       radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
        radix_tree_split(&tree, 0, new_order);
        radix_tree_for_each_slot(slot, &tree, &iter, 0) {
                radix_tree_iter_replace(&tree, &iter, slot,
                                        item_create(iter.index, new_order));
        }
+       radix_tree_preload_end();
 
        item = __radix_tree_lookup(&tree, 0, &node, NULL);
        assert(item != (void *)0x12);
@@ -425,16 +457,20 @@ static void __multiorder_split(int old_order, int new_order)
 
        item_kill_tree(&tree);
 
+       radix_tree_preload(GFP_KERNEL);
        __radix_tree_insert(&tree, 0, old_order, (void *)0x12);
+       radix_tree_preload_end();
 
        item = __radix_tree_lookup(&tree, 0, &node, NULL);
        assert(item == (void *)0x12);
        assert(node->exceptional > 0);
 
+       radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
        radix_tree_split(&tree, 0, new_order);
        radix_tree_for_each_slot(slot, &tree, &iter, 0) {
                radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
        }
+       radix_tree_preload_end();
 
        item = __radix_tree_lookup(&tree, 0, &node, NULL);
        assert(item == (void *)0x16);
@@ -471,4 +507,6 @@ void multiorder_checks(void)
        multiorder_tagged_iteration();
        multiorder_join();
        multiorder_split();
+
+       radix_tree_cpu_dead(0);
 }
index e11e4d260b4ef4fc2ba4af597314707361485663..7c2611caa6d2868b7e366fb4e1c60c889dd85a0c 100644 (file)
@@ -52,3 +52,8 @@ int root_tag_get(struct radix_tree_root *root, unsigned int tag);
 unsigned long node_maxindex(struct radix_tree_node *);
 unsigned long shift_maxindex(unsigned int shift);
 int radix_tree_cpu_dead(unsigned int cpu);
+struct radix_tree_preload {
+       unsigned nr;
+       struct radix_tree_node *nodes;
+};
+extern struct radix_tree_preload radix_tree_preloads;