Commit 4bd6dded authored by Liam R. Howlett, committed by Andrew Morton

test_maple_tree: add more testing for mas_empty_area()

Test robust filling of an entire area of the tree, then test one allocation
beyond it.  This exercises walking back up the tree at the end of nodes and
the error condition.  Test inspired by the reproducer code provided by
Snild Dolkow.

The last test in the function checks for a corrupted maple state caused by
the incorrect limits set during mas_skip_node().  There needs to be a gap
in both the second-last child and the last child, but the search must rule
out the second-last child's gap.  That way the maple state is never
corrected to the proper max limit and the search returns an error.
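
For reference, the allocation pattern the new test exercises looks roughly
like the sketch below (illustrative only, not part of the patch; the payload
value and sizes are made up, and only mas_empty_area()/mas_store_gfp()/
mas_lock() from the existing maple tree API are assumed):

	MA_STATE(mas, mt, 0, 0);
	void *entry = xa_mk_value(0x1234);	/* hypothetical payload */

	mas_lock(&mas);
	/* Look for a 0x2000-wide empty range anywhere in [0, 0x25D78000]. */
	if (mas_empty_area(&mas, 0, 0x25D78000, 0x2000) == 0) {
		/* On success, mas.index/mas.last span the gap that was found. */
		mas_store_gfp(&mas, entry, GFP_KERNEL);
	}
	mas_unlock(&mas);

The test fills the region this way and then checks that a further search
fails with -EBUSY; the corrupted-state case is constructed on top of that
full tree.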

Link: https://lkml.kernel.org/r/20230307180247.2220303-3-Liam.Howlett@oracle.com
Cc: Snild Dolkow <snild@sony.com>
Link: https://lore.kernel.org/linux-mm/cb8dc31a-fef2-1d09-f133-e9f7b9f9e77a@sony.com/
Fixes: e15e06a8 ("lib/test_maple_tree: add testing for maple tree")
Signed-off-by: Liam R. Howlett <Liam.Howlett@oracle.com>
Cc: Peng Zhang <zhangpeng.00@bytedance.com>
Cc: <stable@vger.kernel.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
parent 0fa99fdf
@@ -2670,6 +2670,49 @@ static noinline void check_empty_area_window(struct maple_tree *mt)
	rcu_read_unlock();
}

static noinline void check_empty_area_fill(struct maple_tree *mt)
{
	const unsigned long max = 0x25D78000;
	unsigned long size;
	int loop, shift;
	MA_STATE(mas, mt, 0, 0);

	mt_set_non_kernel(99999);
	for (shift = 12; shift <= 16; shift++) {
		loop = 5000;
		size = 1 << shift;
		while (loop--) {
			mas_set(&mas, 0);
			mas_lock(&mas);
			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
			mas_unlock(&mas);
			mas_reset(&mas);
		}
	}

	/* No space left. */
	size = 0x1000;
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
	rcu_read_unlock();

	/* Fill a depth 3 node to the maximum */
	for (unsigned long i = 629440511; i <= 629440800; i += 6)
		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
	/* Make space in the second-last depth 4 node */
	mtree_erase(mt, 631668735);
	/* Make space in the last depth 4 node */
	mtree_erase(mt, 629506047);
	mas_reset(&mas);
	/* Search from just after the gap in the second-last depth 4 */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
	rcu_read_unlock();
	mt_set_non_kernel(0);
}

static DEFINE_MTREE(tree);
static int maple_tree_seed(void)
{
@@ -2926,6 +2969,11 @@ static int maple_tree_seed(void)
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);
#if defined(BENCH)
skip:
#endif
...