1 // SPDX-License-Identifier: GPL-2.0+
3 * test_maple_tree.c: Test the maple tree API
4 * Copyright (c) 2018-2022 Oracle Corporation
5 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
7 * Any tests that only require the interface of the tree.
10 #include <linux/maple_tree.h>
11 #include <linux/module.h>
12 #include <linux/rwsem.h>
14 #define MTREE_ALLOC_MAX 0x2000000000000Ul
15 #define CONFIG_MAPLE_SEARCH
16 #define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
18 #ifndef CONFIG_DEBUG_MAPLE_TREE
19 #define mt_dump(mt, fmt) do {} while (0)
20 #define mt_validate(mt) do {} while (0)
21 #define mt_cache_shrink() do {} while (0)
22 #define mas_dump(mas) do {} while (0)
23 #define mas_wr_dump(mas) do {} while (0)
24 atomic_t maple_tree_tests_run
;
25 atomic_t maple_tree_tests_passed
;
28 #define MT_BUG_ON(__tree, __x) do { \
29 atomic_inc(&maple_tree_tests_run); \
31 pr_info("BUG at %s:%d (%u)\n", \
32 __func__, __LINE__, __x); \
33 pr_info("Pass: %u Run:%u\n", \
34 atomic_read(&maple_tree_tests_passed), \
35 atomic_read(&maple_tree_tests_run)); \
37 atomic_inc(&maple_tree_tests_passed); \
42 /* #define BENCH_SLOT_STORE */
43 /* #define BENCH_NODE_STORE */
44 /* #define BENCH_AWALK */
45 /* #define BENCH_WALK */
46 /* #define BENCH_LOAD */
47 /* #define BENCH_MT_FOR_EACH */
48 /* #define BENCH_FORK */
49 /* #define BENCH_MAS_FOR_EACH */
50 /* #define BENCH_MAS_PREV */
53 #define mt_set_non_kernel(x) do {} while (0)
54 #define mt_zero_nr_tallocated(x) do {} while (0)
56 #define cond_resched() do {} while (0)
59 #define mas_is_none(x) ((x)->status == ma_none)
60 #define mas_is_overflow(x) ((x)->status == ma_overflow)
61 #define mas_is_underflow(x) ((x)->status == ma_underflow)
63 static int __init
mtree_insert_index(struct maple_tree
*mt
,
64 unsigned long index
, gfp_t gfp
)
66 return mtree_insert(mt
, index
, xa_mk_value(index
& LONG_MAX
), gfp
);
69 static void __init
mtree_erase_index(struct maple_tree
*mt
, unsigned long index
)
71 MT_BUG_ON(mt
, mtree_erase(mt
, index
) != xa_mk_value(index
& LONG_MAX
));
72 MT_BUG_ON(mt
, mtree_load(mt
, index
) != NULL
);
75 static int __init
mtree_test_insert(struct maple_tree
*mt
, unsigned long index
,
78 return mtree_insert(mt
, index
, ptr
, GFP_KERNEL
);
81 static int __init
mtree_test_store_range(struct maple_tree
*mt
,
82 unsigned long start
, unsigned long end
, void *ptr
)
84 return mtree_store_range(mt
, start
, end
, ptr
, GFP_KERNEL
);
87 static int __init
mtree_test_store(struct maple_tree
*mt
, unsigned long start
,
90 return mtree_test_store_range(mt
, start
, start
, ptr
);
93 static int __init
mtree_test_insert_range(struct maple_tree
*mt
,
94 unsigned long start
, unsigned long end
, void *ptr
)
96 return mtree_insert_range(mt
, start
, end
, ptr
, GFP_KERNEL
);
99 static void __init
*mtree_test_load(struct maple_tree
*mt
, unsigned long index
)
101 return mtree_load(mt
, index
);
104 static void __init
*mtree_test_erase(struct maple_tree
*mt
, unsigned long index
)
106 return mtree_erase(mt
, index
);
109 #if defined(CONFIG_64BIT)
110 static noinline
void __init
check_mtree_alloc_range(struct maple_tree
*mt
,
111 unsigned long start
, unsigned long end
, unsigned long size
,
112 unsigned long expected
, int eret
, void *ptr
)
115 unsigned long result
= expected
+ 1;
118 ret
= mtree_alloc_range(mt
, &result
, ptr
, size
, start
, end
,
120 MT_BUG_ON(mt
, ret
!= eret
);
124 MT_BUG_ON(mt
, result
!= expected
);
127 static noinline
void __init
check_mtree_alloc_rrange(struct maple_tree
*mt
,
128 unsigned long start
, unsigned long end
, unsigned long size
,
129 unsigned long expected
, int eret
, void *ptr
)
132 unsigned long result
= expected
+ 1;
135 ret
= mtree_alloc_rrange(mt
, &result
, ptr
, size
, start
, end
,
137 MT_BUG_ON(mt
, ret
!= eret
);
141 MT_BUG_ON(mt
, result
!= expected
);
145 static noinline
void __init
check_load(struct maple_tree
*mt
,
146 unsigned long index
, void *ptr
)
148 void *ret
= mtree_test_load(mt
, index
);
151 pr_err("Load %lu returned %p expect %p\n", index
, ret
, ptr
);
152 MT_BUG_ON(mt
, ret
!= ptr
);
155 static noinline
void __init
check_store_range(struct maple_tree
*mt
,
156 unsigned long start
, unsigned long end
, void *ptr
, int expected
)
161 ret
= mtree_test_store_range(mt
, start
, end
, ptr
);
162 MT_BUG_ON(mt
, ret
!= expected
);
167 for (i
= start
; i
<= end
; i
++)
168 check_load(mt
, i
, ptr
);
171 static noinline
void __init
check_insert_range(struct maple_tree
*mt
,
172 unsigned long start
, unsigned long end
, void *ptr
, int expected
)
177 ret
= mtree_test_insert_range(mt
, start
, end
, ptr
);
178 MT_BUG_ON(mt
, ret
!= expected
);
183 for (i
= start
; i
<= end
; i
++)
184 check_load(mt
, i
, ptr
);
187 static noinline
void __init
check_insert(struct maple_tree
*mt
,
188 unsigned long index
, void *ptr
)
192 ret
= mtree_test_insert(mt
, index
, ptr
);
193 MT_BUG_ON(mt
, ret
!= 0);
196 static noinline
void __init
check_dup_insert(struct maple_tree
*mt
,
197 unsigned long index
, void *ptr
)
201 ret
= mtree_test_insert(mt
, index
, ptr
);
202 MT_BUG_ON(mt
, ret
!= -EEXIST
);
206 static noinline
void __init
check_index_load(struct maple_tree
*mt
,
209 return check_load(mt
, index
, xa_mk_value(index
& LONG_MAX
));
212 static inline __init
int not_empty(struct maple_node
*node
)
219 for (i
= 0; i
< ARRAY_SIZE(node
->slot
); i
++)
227 static noinline
void __init
check_rev_seq(struct maple_tree
*mt
,
228 unsigned long max
, bool verbose
)
230 unsigned long i
= max
, j
;
232 MT_BUG_ON(mt
, !mtree_empty(mt
));
234 mt_zero_nr_tallocated();
236 MT_BUG_ON(mt
, mtree_insert_index(mt
, i
, GFP_KERNEL
));
237 for (j
= i
; j
<= max
; j
++)
238 check_index_load(mt
, j
);
240 check_load(mt
, i
- 1, NULL
);
242 MT_BUG_ON(mt
, !mt_height(mt
));
244 MT_BUG_ON(mt
, !mt_height(mt
));
247 check_load(mt
, max
+ 1, NULL
);
252 mt_dump(mt
, mt_dump_dec
);
253 pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
254 __func__
, max
, mt_get_alloc_size()/1024, mt_nr_allocated(),
260 static noinline
void __init
check_seq(struct maple_tree
*mt
, unsigned long max
,
265 MT_BUG_ON(mt
, !mtree_empty(mt
));
267 mt_zero_nr_tallocated();
268 for (i
= 0; i
<= max
; i
++) {
269 MT_BUG_ON(mt
, mtree_insert_index(mt
, i
, GFP_KERNEL
));
270 for (j
= 0; j
<= i
; j
++)
271 check_index_load(mt
, j
);
274 MT_BUG_ON(mt
, !mt_height(mt
));
275 check_load(mt
, i
+ 1, NULL
);
281 mt_dump(mt
, mt_dump_dec
);
282 pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
283 max
, mt_get_alloc_size()/1024, mt_nr_allocated(),
289 static noinline
void __init
check_lb_not_empty(struct maple_tree
*mt
)
292 unsigned long huge
= 4000UL * 1000 * 1000;
297 check_insert(mt
, i
, (void *) i
);
298 for (j
= huge
; j
>= i
; j
/= 2) {
299 check_load(mt
, j
-1, NULL
);
300 check_load(mt
, j
, (void *) j
);
301 check_load(mt
, j
+1, NULL
);
308 static noinline
void __init
check_lower_bound_split(struct maple_tree
*mt
)
310 MT_BUG_ON(mt
, !mtree_empty(mt
));
311 check_lb_not_empty(mt
);
314 static noinline
void __init
check_upper_bound_split(struct maple_tree
*mt
)
319 MT_BUG_ON(mt
, !mtree_empty(mt
));
324 huge
= 4000UL * 1000 * 1000;
328 check_insert(mt
, i
, (void *) i
);
329 for (j
= i
; j
>= huge
; j
*= 2) {
330 check_load(mt
, j
-1, NULL
);
331 check_load(mt
, j
, (void *) j
);
332 check_load(mt
, j
+1, NULL
);
339 static noinline
void __init
check_mid_split(struct maple_tree
*mt
)
341 unsigned long huge
= 8000UL * 1000 * 1000;
343 check_insert(mt
, huge
, (void *) huge
);
344 check_insert(mt
, 0, xa_mk_value(0));
345 check_lb_not_empty(mt
);
348 static noinline
void __init
check_rev_find(struct maple_tree
*mt
)
350 int i
, nr_entries
= 200;
352 MA_STATE(mas
, mt
, 0, 0);
354 for (i
= 0; i
<= nr_entries
; i
++)
355 mtree_store_range(mt
, i
*10, i
*10 + 5,
356 xa_mk_value(i
), GFP_KERNEL
);
360 val
= mas_find_rev(&mas
, 1000);
361 MT_BUG_ON(mt
, val
!= xa_mk_value(100));
362 val
= mas_find_rev(&mas
, 1000);
363 MT_BUG_ON(mt
, val
!= NULL
);
366 val
= mas_find_rev(&mas
, 997);
367 MT_BUG_ON(mt
, val
!= NULL
);
370 val
= mas_find_rev(&mas
, 900);
371 MT_BUG_ON(mt
, val
!= xa_mk_value(100));
372 val
= mas_find_rev(&mas
, 900);
373 MT_BUG_ON(mt
, val
!= xa_mk_value(99));
376 val
= mas_find_rev(&mas
, 0);
377 MT_BUG_ON(mt
, val
!= xa_mk_value(2));
378 val
= mas_find_rev(&mas
, 0);
379 MT_BUG_ON(mt
, val
!= xa_mk_value(1));
380 val
= mas_find_rev(&mas
, 0);
381 MT_BUG_ON(mt
, val
!= xa_mk_value(0));
382 val
= mas_find_rev(&mas
, 0);
383 MT_BUG_ON(mt
, val
!= NULL
);
387 static noinline
void __init
check_find(struct maple_tree
*mt
)
389 unsigned long val
= 0;
393 unsigned long last
= 0, index
= 0;
394 void *entry
, *entry2
;
396 MA_STATE(mas
, mt
, 0, 0);
399 MT_BUG_ON(mt
, mtree_insert_index(mt
, val
++, GFP_KERNEL
));
401 #if defined(CONFIG_64BIT)
402 top
= 4398046511104UL;
413 for (int i
= 0; i
<= count
; i
++) {
415 MT_BUG_ON(mt
, mtree_insert_index(mt
, val
, GFP_KERNEL
));
417 MT_BUG_ON(mt
, mtree_insert(mt
, val
,
418 XA_ZERO_ENTRY
, GFP_KERNEL
));
426 while ((entry
= mas_find(&mas
, 268435456)) != NULL
) {
428 MT_BUG_ON(mt
, xa_mk_value(val
) != entry
);
430 MT_BUG_ON(mt
, entry
!= XA_ZERO_ENTRY
);
433 /* For zero check. */
442 mas_for_each(&mas
, entry
, ULONG_MAX
) {
444 MT_BUG_ON(mt
, xa_mk_value(val
) != entry
);
446 MT_BUG_ON(mt
, entry
!= XA_ZERO_ENTRY
);
448 /* For zero check. */
458 mas_for_each(&mas
, entry
, ULONG_MAX
) {
460 MT_BUG_ON(mt
, xa_mk_value(val
) != entry
);
462 MT_BUG_ON(mt
, entry
!= XA_ZERO_ENTRY
);
464 /* For zero check. */
475 max
= 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
476 mt_for_each(mt
, entry
, index
, max
) {
477 MT_BUG_ON(mt
, xa_mk_value(val
) != entry
);
479 if (val
== 64) /* Skip zero entry. */
481 /* For zero check. */
489 MT_BUG_ON(mt
, mtree_insert_index(mt
, ULONG_MAX
, GFP_KERNEL
));
490 mt_for_each(mt
, entry
, index
, ULONG_MAX
) {
492 MT_BUG_ON(mt
, entry
!= xa_mk_value(LONG_MAX
));
494 MT_BUG_ON(mt
, xa_mk_value(val
) != entry
);
496 /* Workaround for 32bit */
497 if ((val
<< 2) < val
)
502 if (val
== 64) /* Skip zero entry. */
504 /* For zero check. */
508 MT_BUG_ON(mt
, max
> 25);
510 mtree_erase_index(mt
, ULONG_MAX
);
514 entry
= mt_find(mt
, &index
, 512);
515 MT_BUG_ON(mt
, xa_mk_value(256) != entry
);
519 entry
= mt_find(mt
, &index
, 20);
520 MT_BUG_ON(mt
, entry
!= NULL
);
524 /* Insert ULONG_MAX */
525 MT_BUG_ON(mt
, mtree_insert_index(mt
, ULONG_MAX
, GFP_KERNEL
));
530 mas_for_each(&mas
, entry
, ULONG_MAX
) {
532 MT_BUG_ON(mt
, entry
!= XA_ZERO_ENTRY
);
534 MT_BUG_ON(mt
, entry
!= xa_mk_value(LONG_MAX
));
536 MT_BUG_ON(mt
, xa_mk_value(val
) != entry
);
538 /* Workaround for 32bit */
539 if ((val
<< 2) < val
)
544 /* For zero check. */
553 mas_set(&mas
, 1048576);
555 entry
= mas_find(&mas
, 1048576);
557 MT_BUG_ON(mas
.tree
, entry
== NULL
);
561 * 1. get the expected value, leveraging the existence of an end entry
562 * 2. delete end entry
563 * 3. find the last value but searching for ULONG_MAX and then using
566 /* First, get the expected result. */
569 mas
.index
= ULONG_MAX
; /* start at max.. */
570 entry
= mas_find(&mas
, ULONG_MAX
);
571 entry
= mas_prev(&mas
, 0);
575 /* Erase the last entry. */
577 mas
.index
= ULONG_MAX
;
578 mas
.last
= ULONG_MAX
;
581 /* Get the previous value from MAS_START */
583 entry2
= mas_prev(&mas
, 0);
586 MT_BUG_ON(mt
, entry
!= entry2
);
587 MT_BUG_ON(mt
, index
!= mas
.index
);
588 MT_BUG_ON(mt
, last
!= mas
.last
);
591 mas
.status
= ma_none
;
592 mas
.index
= ULONG_MAX
;
593 mas
.last
= ULONG_MAX
;
594 entry2
= mas_prev(&mas
, 0);
595 MT_BUG_ON(mt
, entry
!= entry2
);
598 MT_BUG_ON(mt
, mas_prev(&mas
, 0) != NULL
);
604 static noinline
void __init
check_find_2(struct maple_tree
*mt
)
609 MA_STATE(mas
, mt
, 0, 0);
611 mas_for_each(&mas
, entry
, ULONG_MAX
)
615 for (i
= 0; i
< 256; i
++) {
616 mtree_insert_index(mt
, i
, GFP_KERNEL
);
620 mas_for_each(&mas
, entry
, ULONG_MAX
) {
621 MT_BUG_ON(mt
, entry
!= xa_mk_value(j
));
625 MT_BUG_ON(mt
, j
!= i
+ 1);
628 for (i
= 0; i
< 256; i
++) {
629 mtree_erase_index(mt
, i
);
633 mas_for_each(&mas
, entry
, ULONG_MAX
) {
634 if (xa_is_zero(entry
))
637 MT_BUG_ON(mt
, entry
!= xa_mk_value(j
));
641 MT_BUG_ON(mt
, j
!= 256);
644 /*MT_BUG_ON(mt, !mtree_empty(mt)); */
648 #if defined(CONFIG_64BIT)
649 static noinline
void __init
check_alloc_rev_range(struct maple_tree
*mt
)
653 * cat /proc/self/maps | awk '{print $1}'|
654 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
657 static const unsigned long range
[] = {
658 /* Inclusive , Exclusive. */
659 0x565234af2000, 0x565234af4000,
660 0x565234af4000, 0x565234af9000,
661 0x565234af9000, 0x565234afb000,
662 0x565234afc000, 0x565234afd000,
663 0x565234afd000, 0x565234afe000,
664 0x565235def000, 0x565235e10000,
665 0x7f36d4bfd000, 0x7f36d4ee2000,
666 0x7f36d4ee2000, 0x7f36d4f04000,
667 0x7f36d4f04000, 0x7f36d504c000,
668 0x7f36d504c000, 0x7f36d5098000,
669 0x7f36d5098000, 0x7f36d5099000,
670 0x7f36d5099000, 0x7f36d509d000,
671 0x7f36d509d000, 0x7f36d509f000,
672 0x7f36d509f000, 0x7f36d50a5000,
673 0x7f36d50b9000, 0x7f36d50db000,
674 0x7f36d50db000, 0x7f36d50dc000,
675 0x7f36d50dc000, 0x7f36d50fa000,
676 0x7f36d50fa000, 0x7f36d5102000,
677 0x7f36d5102000, 0x7f36d5103000,
678 0x7f36d5103000, 0x7f36d5104000,
679 0x7f36d5104000, 0x7f36d5105000,
680 0x7fff5876b000, 0x7fff5878d000,
681 0x7fff5878e000, 0x7fff58791000,
682 0x7fff58791000, 0x7fff58793000,
685 static const unsigned long holes
[] = {
687 * Note: start of hole is INCLUSIVE
688 * end of hole is EXCLUSIVE
689 * (opposite of the above table.)
690 * Start of hole, end of hole, size of hole (+1)
692 0x565234afb000, 0x565234afc000, 0x1000,
693 0x565234afe000, 0x565235def000, 0x12F1000,
694 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
698 * req_range consists of 4 values.
702 * 4. number that should be returned.
705 static const unsigned long req_range
[] = {
706 0x565234af9000, /* Min */
707 0x7fff58791000, /* Max */
709 0x7fff5878d << 12, /* First rev hole of size 0x1000 */
710 0, /* Return value success. */
713 0x565234AF0 << 12, /* Max */
715 0x565234AEE << 12, /* max - 3. */
716 0, /* Return value success. */
721 562949953421311 << 12,/* First rev hole of size 0x1000 */
722 0, /* Return value success. */
725 0x7F36D5109 << 12, /* Max */
727 0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
728 0, /* Return value success. */
739 18446744073709551615UL,
740 562915594369134UL << 12,
744 /* Single space test. */
752 int i
, range_count
= ARRAY_SIZE(range
);
753 int req_range_count
= ARRAY_SIZE(req_range
);
754 unsigned long min
= 0;
756 MA_STATE(mas
, mt
, 0, 0);
758 mtree_store_range(mt
, MTREE_ALLOC_MAX
, ULONG_MAX
, XA_ZERO_ENTRY
,
760 #define DEBUG_REV_RANGE 0
761 for (i
= 0; i
< range_count
; i
+= 2) {
762 /* Inclusive, Inclusive (with the -1) */
765 pr_debug("\t%s: Insert %lu-%lu\n", __func__
, range
[i
] >> 12,
766 (range
[i
+ 1] >> 12) - 1);
768 check_insert_range(mt
, range
[i
] >> 12, (range
[i
+ 1] >> 12) - 1,
769 xa_mk_value(range
[i
] >> 12), 0);
775 for (i
= 0; i
< ARRAY_SIZE(holes
); i
+= 3) {
777 pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
778 min
, holes
[i
+1]>>12, holes
[i
+2]>>12,
781 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, min
,
785 pr_debug("Found %lu %lu\n", mas
.index
, mas
.last
);
786 pr_debug("gap %lu %lu\n", (holes
[i
] >> 12),
789 MT_BUG_ON(mt
, mas
.last
+ 1 != (holes
[i
+1] >> 12));
790 MT_BUG_ON(mt
, mas
.index
!= (holes
[i
+1] >> 12) - (holes
[i
+2] >> 12));
791 min
= holes
[i
+1] >> 12;
796 for (i
= 0; i
< req_range_count
; i
+= 5) {
798 pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
799 i
, req_range
[i
] >> 12,
800 (req_range
[i
+ 1] >> 12),
801 req_range
[i
+2] >> 12,
802 req_range
[i
+3] >> 12);
804 check_mtree_alloc_rrange(mt
,
805 req_range
[i
] >> 12, /* start */
806 req_range
[i
+1] >> 12, /* end */
807 req_range
[i
+2] >> 12, /* size */
808 req_range
[i
+3] >> 12, /* expected address */
809 req_range
[i
+4], /* expected return */
810 xa_mk_value(req_range
[i
] >> 12)); /* pointer */
814 mt_set_non_kernel(1);
815 mtree_erase(mt
, 34148798727); /* create a deleted range. */
816 mtree_erase(mt
, 34148798725);
817 check_mtree_alloc_rrange(mt
, 0, 34359052173, 210253414,
823 static noinline
void __init
check_alloc_range(struct maple_tree
*mt
)
827 * cat /proc/self/maps|awk '{print $1}'|
828 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
831 static const unsigned long range
[] = {
832 /* Inclusive , Exclusive. */
833 0x565234af2000, 0x565234af4000,
834 0x565234af4000, 0x565234af9000,
835 0x565234af9000, 0x565234afb000,
836 0x565234afc000, 0x565234afd000,
837 0x565234afd000, 0x565234afe000,
838 0x565235def000, 0x565235e10000,
839 0x7f36d4bfd000, 0x7f36d4ee2000,
840 0x7f36d4ee2000, 0x7f36d4f04000,
841 0x7f36d4f04000, 0x7f36d504c000,
842 0x7f36d504c000, 0x7f36d5098000,
843 0x7f36d5098000, 0x7f36d5099000,
844 0x7f36d5099000, 0x7f36d509d000,
845 0x7f36d509d000, 0x7f36d509f000,
846 0x7f36d509f000, 0x7f36d50a5000,
847 0x7f36d50b9000, 0x7f36d50db000,
848 0x7f36d50db000, 0x7f36d50dc000,
849 0x7f36d50dc000, 0x7f36d50fa000,
850 0x7f36d50fa000, 0x7f36d5102000,
851 0x7f36d5102000, 0x7f36d5103000,
852 0x7f36d5103000, 0x7f36d5104000,
853 0x7f36d5104000, 0x7f36d5105000,
854 0x7fff5876b000, 0x7fff5878d000,
855 0x7fff5878e000, 0x7fff58791000,
856 0x7fff58791000, 0x7fff58793000,
858 static const unsigned long holes
[] = {
859 /* Start of hole, end of hole, size of hole (+1) */
860 0x565234afb000, 0x565234afc000, 0x1000,
861 0x565234afe000, 0x565235def000, 0x12F1000,
862 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
866 * req_range consists of 4 values.
870 * 4. number that should be returned.
873 static const unsigned long req_range
[] = {
874 0x565234af9000, /* Min */
875 0x7fff58791000, /* Max */
877 0x565234afb000, /* First hole in our data of size 1000. */
878 0, /* Return value success. */
881 0x7fff58791000, /* Max */
883 0x0, /* First hole in our data of size 2000. */
884 0, /* Return value success. */
887 34148797436 << 12, /* Min */
888 0x7fff587AF000, /* Max */
890 34148798629 << 12, /* Expected location */
891 0, /* Return value success. */
894 34148798623 << 12, /* Min */
895 34148798683 << 12, /* Max */
897 0, /* Expected location */
898 -EBUSY
, /* Return value failed. */
900 /* Test filling entire gap. */
901 34148798623 << 12, /* Min */
902 0x7fff587AF000, /* Max */
904 34148798632 << 12, /* Expected location */
905 0, /* Return value success. */
907 /* Test walking off the end of root. */
911 0, /* Expected location */
912 -EBUSY
, /* Return value failure. */
914 /* Test looking for too large a hole across entire range. */
917 4503599618982063UL << 12, /* Size */
918 34359052178 << 12, /* Expected location */
919 -EBUSY
, /* Return failure. */
921 /* Test a single entry */
922 34148798648 << 12, /* Min */
923 34148798648 << 12, /* Max */
924 4096, /* Size of 1 */
925 34148798648 << 12, /* Location is the same as min/max */
928 int i
, range_count
= ARRAY_SIZE(range
);
929 int req_range_count
= ARRAY_SIZE(req_range
);
930 unsigned long min
= 0x565234af2000;
931 MA_STATE(mas
, mt
, 0, 0);
933 mtree_store_range(mt
, MTREE_ALLOC_MAX
, ULONG_MAX
, XA_ZERO_ENTRY
,
935 for (i
= 0; i
< range_count
; i
+= 2) {
936 #define DEBUG_ALLOC_RANGE 0
937 #if DEBUG_ALLOC_RANGE
938 pr_debug("\tInsert %lu-%lu\n", range
[i
] >> 12,
939 (range
[i
+ 1] >> 12) - 1);
940 mt_dump(mt
, mt_dump_hex
);
942 check_insert_range(mt
, range
[i
] >> 12, (range
[i
+ 1] >> 12) - 1,
943 xa_mk_value(range
[i
] >> 12), 0);
950 for (i
= 0; i
< ARRAY_SIZE(holes
); i
+= 3) {
952 #if DEBUG_ALLOC_RANGE
953 pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min
>> 12,
954 holes
[i
+1] >> 12, holes
[i
+2] >> 12,
957 MT_BUG_ON(mt
, mas_empty_area(&mas
, min
>> 12,
960 MT_BUG_ON(mt
, mas
.index
!= holes
[i
] >> 12);
965 for (i
= 0; i
< req_range_count
; i
+= 5) {
966 #if DEBUG_ALLOC_RANGE
967 pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
968 i
/5, req_range
[i
] >> 12, req_range
[i
+ 1] >> 12,
969 req_range
[i
+ 2] >> 12, req_range
[i
+ 3] >> 12,
970 req_range
[i
], req_range
[i
+1]);
972 check_mtree_alloc_range(mt
,
973 req_range
[i
] >> 12, /* start */
974 req_range
[i
+1] >> 12, /* end */
975 req_range
[i
+2] >> 12, /* size */
976 req_range
[i
+3] >> 12, /* expected address */
977 req_range
[i
+4], /* expected return */
978 xa_mk_value(req_range
[i
] >> 12)); /* pointer */
980 #if DEBUG_ALLOC_RANGE
981 mt_dump(mt
, mt_dump_hex
);
989 static noinline
void __init
check_ranges(struct maple_tree
*mt
)
992 static const unsigned long r
[] = {
995 17, 22, /* Overlaps previous range. */
1002 MT_BUG_ON(mt
, !mtree_empty(mt
));
1003 check_insert_range(mt
, r
[0], r
[1], xa_mk_value(r
[0]), 0);
1004 check_insert_range(mt
, r
[2], r
[3], xa_mk_value(r
[2]), 0);
1005 check_insert_range(mt
, r
[4], r
[5], xa_mk_value(r
[4]), -EEXIST
);
1006 MT_BUG_ON(mt
, !mt_height(mt
));
1008 check_store_range(mt
, r
[4], r
[5], xa_mk_value(r
[4]), 0);
1009 check_store_range(mt
, r
[6], r
[7], xa_mk_value(r
[6]), 0);
1010 check_store_range(mt
, r
[8], r
[9], xa_mk_value(r
[8]), 0);
1011 MT_BUG_ON(mt
, !mt_height(mt
));
1013 MT_BUG_ON(mt
, mt_height(mt
));
1015 check_seq(mt
, 50, false);
1016 mt_set_non_kernel(4);
1017 check_store_range(mt
, 5, 47, xa_mk_value(47), 0);
1018 MT_BUG_ON(mt
, !mt_height(mt
));
1021 /* Create tree of 1-100 */
1022 check_seq(mt
, 100, false);
1024 mt_set_non_kernel(10);
1025 check_store_range(mt
, r
[10], r
[11], xa_mk_value(r
[10]), 0);
1026 MT_BUG_ON(mt
, !mt_height(mt
));
1029 /* Create tree of 1-200 */
1030 check_seq(mt
, 200, false);
1032 check_store_range(mt
, r
[10], r
[11], xa_mk_value(r
[10]), 0);
1033 MT_BUG_ON(mt
, !mt_height(mt
));
1036 check_seq(mt
, 30, false);
1037 check_store_range(mt
, 6, 18, xa_mk_value(6), 0);
1038 MT_BUG_ON(mt
, !mt_height(mt
));
1041 /* Overwrite across multiple levels. */
1042 /* Create tree of 1-400 */
1043 check_seq(mt
, 400, false);
1044 mt_set_non_kernel(50);
1046 check_store_range(mt
, r
[12], r
[13], xa_mk_value(r
[12]), 0);
1047 mt_set_non_kernel(50);
1048 mtree_test_erase(mt
, 140);
1049 mtree_test_erase(mt
, 141);
1050 mtree_test_erase(mt
, 142);
1051 mtree_test_erase(mt
, 143);
1052 mtree_test_erase(mt
, 130);
1053 mtree_test_erase(mt
, 131);
1054 mtree_test_erase(mt
, 132);
1055 mtree_test_erase(mt
, 133);
1056 mtree_test_erase(mt
, 134);
1057 mtree_test_erase(mt
, 135);
1058 check_load(mt
, r
[12], xa_mk_value(r
[12]));
1059 check_load(mt
, r
[13], xa_mk_value(r
[12]));
1060 check_load(mt
, r
[13] - 1, xa_mk_value(r
[12]));
1061 check_load(mt
, r
[13] + 1, xa_mk_value(r
[13] + 1));
1062 check_load(mt
, 135, NULL
);
1063 check_load(mt
, 140, NULL
);
1064 mt_set_non_kernel(0);
1065 MT_BUG_ON(mt
, !mt_height(mt
));
1070 /* Overwrite multiple levels at the end of the tree (slot 7) */
1071 mt_set_non_kernel(50);
1072 check_seq(mt
, 400, false);
1073 check_store_range(mt
, 353, 361, xa_mk_value(353), 0);
1074 check_store_range(mt
, 347, 352, xa_mk_value(347), 0);
1076 check_load(mt
, 346, xa_mk_value(346));
1077 for (i
= 347; i
<= 352; i
++)
1078 check_load(mt
, i
, xa_mk_value(347));
1079 for (i
= 353; i
<= 361; i
++)
1080 check_load(mt
, i
, xa_mk_value(353));
1081 check_load(mt
, 362, xa_mk_value(362));
1082 mt_set_non_kernel(0);
1083 MT_BUG_ON(mt
, !mt_height(mt
));
1086 mt_set_non_kernel(50);
1087 check_seq(mt
, 400, false);
1088 check_store_range(mt
, 352, 364, NULL
, 0);
1089 check_store_range(mt
, 351, 363, xa_mk_value(352), 0);
1090 check_load(mt
, 350, xa_mk_value(350));
1091 check_load(mt
, 351, xa_mk_value(352));
1092 for (i
= 352; i
<= 363; i
++)
1093 check_load(mt
, i
, xa_mk_value(352));
1094 check_load(mt
, 364, NULL
);
1095 check_load(mt
, 365, xa_mk_value(365));
1096 mt_set_non_kernel(0);
1097 MT_BUG_ON(mt
, !mt_height(mt
));
1100 mt_set_non_kernel(5);
1101 check_seq(mt
, 400, false);
1102 check_store_range(mt
, 352, 364, NULL
, 0);
1103 check_store_range(mt
, 351, 364, xa_mk_value(352), 0);
1104 check_load(mt
, 350, xa_mk_value(350));
1105 check_load(mt
, 351, xa_mk_value(352));
1106 for (i
= 352; i
<= 364; i
++)
1107 check_load(mt
, i
, xa_mk_value(352));
1108 check_load(mt
, 365, xa_mk_value(365));
1109 mt_set_non_kernel(0);
1110 MT_BUG_ON(mt
, !mt_height(mt
));
1114 mt_set_non_kernel(50);
1115 check_seq(mt
, 400, false);
1116 check_store_range(mt
, 362, 367, xa_mk_value(362), 0);
1117 check_store_range(mt
, 353, 361, xa_mk_value(353), 0);
1118 mt_set_non_kernel(0);
1120 MT_BUG_ON(mt
, !mt_height(mt
));
1123 * Interesting cases:
1124 * 1. Overwrite the end of a node and end in the first entry of the next
1126 * 2. Split a single range
1127 * 3. Overwrite the start of a range
1128 * 4. Overwrite the end of a range
1129 * 5. Overwrite the entire range
1130 * 6. Overwrite a range that causes multiple parent nodes to be
1132 * 7. Overwrite a range that causes multiple parent nodes and part of
1133 * root to be combined
1134 * 8. Overwrite the whole tree
1135 * 9. Try to overwrite the zero entry of an alloc tree.
1136 * 10. Write a range larger than a nodes current pivot
1139 mt_set_non_kernel(50);
1140 for (i
= 0; i
<= 500; i
++) {
1143 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1145 check_store_range(mt
, 2400, 2400, xa_mk_value(2400), 0);
1146 check_store_range(mt
, 2411, 2411, xa_mk_value(2411), 0);
1147 check_store_range(mt
, 2412, 2412, xa_mk_value(2412), 0);
1148 check_store_range(mt
, 2396, 2400, xa_mk_value(4052020), 0);
1149 check_store_range(mt
, 2402, 2402, xa_mk_value(2402), 0);
1151 mt_set_non_kernel(0);
1153 mt_set_non_kernel(50);
1154 for (i
= 0; i
<= 500; i
++) {
1157 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1159 check_store_range(mt
, 2422, 2422, xa_mk_value(2422), 0);
1160 check_store_range(mt
, 2424, 2424, xa_mk_value(2424), 0);
1161 check_store_range(mt
, 2425, 2425, xa_mk_value(2), 0);
1162 check_store_range(mt
, 2460, 2470, NULL
, 0);
1163 check_store_range(mt
, 2435, 2460, xa_mk_value(2435), 0);
1164 check_store_range(mt
, 2461, 2470, xa_mk_value(2461), 0);
1165 mt_set_non_kernel(0);
1166 MT_BUG_ON(mt
, !mt_height(mt
));
1169 /* Check in-place modifications */
1170 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1171 /* Append to the start of last range */
1172 mt_set_non_kernel(50);
1173 for (i
= 0; i
<= 500; i
++) {
1176 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1179 /* Append to the last range without touching any boundaries */
1180 for (i
= 0; i
< 10; i
++) {
1183 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1186 /* Append to the end of last range */
1188 for (i
= 0; i
< 10; i
++) {
1190 MT_BUG_ON(mt
, mtree_test_store_range(mt
, val
, ULONG_MAX
,
1191 xa_mk_value(val
)) != 0);
1194 /* Overwriting the range and over a part of the next range */
1195 for (i
= 10; i
< 30; i
+= 2) {
1198 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1201 /* Overwriting a part of the range and over the next range */
1202 for (i
= 50; i
< 70; i
+= 2) {
1205 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1209 * Expand the range, only partially overwriting the previous and
1212 for (i
= 100; i
< 130; i
+= 3) {
1215 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1219 * Expand the range, only partially overwriting the previous and
1220 * next ranges, in RCU mode
1223 for (i
= 150; i
< 180; i
+= 3) {
1226 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1229 MT_BUG_ON(mt
, !mt_height(mt
));
1231 mt_set_non_kernel(0);
1234 /* Test rebalance gaps */
1235 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1236 mt_set_non_kernel(50);
1237 for (i
= 0; i
<= 50; i
++) {
1240 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1242 check_store_range(mt
, 161, 161, xa_mk_value(161), 0);
1243 check_store_range(mt
, 162, 162, xa_mk_value(162), 0);
1244 check_store_range(mt
, 163, 163, xa_mk_value(163), 0);
1245 check_store_range(mt
, 240, 249, NULL
, 0);
1246 mtree_erase(mt
, 200);
1247 mtree_erase(mt
, 210);
1248 mtree_erase(mt
, 220);
1249 mtree_erase(mt
, 230);
1250 mt_set_non_kernel(0);
1251 MT_BUG_ON(mt
, !mt_height(mt
));
1254 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1255 for (i
= 0; i
<= 500; i
++) {
1258 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1260 check_store_range(mt
, 4600, 4959, xa_mk_value(1), 0);
1262 MT_BUG_ON(mt
, !mt_height(mt
));
1265 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1266 for (i
= 0; i
<= 500; i
++) {
1269 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1271 check_store_range(mt
, 4811, 4811, xa_mk_value(4811), 0);
1272 check_store_range(mt
, 4812, 4812, xa_mk_value(4812), 0);
1273 check_store_range(mt
, 4861, 4861, xa_mk_value(4861), 0);
1274 check_store_range(mt
, 4862, 4862, xa_mk_value(4862), 0);
1275 check_store_range(mt
, 4842, 4849, NULL
, 0);
1277 MT_BUG_ON(mt
, !mt_height(mt
));
1280 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1281 for (i
= 0; i
<= 1300; i
++) {
1284 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1285 MT_BUG_ON(mt
, mt_height(mt
) >= 4);
1287 /* Cause a 3 child split all the way up the tree. */
1288 for (i
= 5; i
< 215; i
+= 10)
1289 check_store_range(mt
, 11450 + i
, 11450 + i
+ 1, NULL
, 0);
1290 for (i
= 5; i
< 65; i
+= 10)
1291 check_store_range(mt
, 11770 + i
, 11770 + i
+ 1, NULL
, 0);
1293 MT_BUG_ON(mt
, mt_height(mt
) >= 4);
1294 for (i
= 5; i
< 45; i
+= 10)
1295 check_store_range(mt
, 11700 + i
, 11700 + i
+ 1, NULL
, 0);
1297 MT_BUG_ON(mt
, mt_height(mt
) < 4);
1301 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1302 for (i
= 0; i
<= 1200; i
++) {
1305 check_store_range(mt
, val
, val2
, xa_mk_value(val
), 0);
1306 MT_BUG_ON(mt
, mt_height(mt
) >= 4);
1308 /* Fill parents and leaves before split. */
1309 for (i
= 5; i
< 455; i
+= 10)
1310 check_store_range(mt
, 7800 + i
, 7800 + i
+ 1, NULL
, 0);
1312 for (i
= 1; i
< 16; i
++)
1313 check_store_range(mt
, 8185 + i
, 8185 + i
+ 1,
1314 xa_mk_value(8185+i
), 0);
1315 MT_BUG_ON(mt
, mt_height(mt
) >= 4);
1316 /* triple split across multiple levels. */
1317 check_store_range(mt
, 8184, 8184, xa_mk_value(8184), 0);
1319 MT_BUG_ON(mt
, mt_height(mt
) != 4);
1322 static noinline
void __init
check_next_entry(struct maple_tree
*mt
)
1325 unsigned long limit
= 30, i
= 0;
1326 MA_STATE(mas
, mt
, i
, i
);
1328 MT_BUG_ON(mt
, !mtree_empty(mt
));
1330 check_seq(mt
, limit
, false);
1333 /* Check the first one and get ma_state in the correct state. */
1334 MT_BUG_ON(mt
, mas_walk(&mas
) != xa_mk_value(i
++));
1335 for ( ; i
<= limit
+ 1; i
++) {
1336 entry
= mas_next(&mas
, limit
);
1338 MT_BUG_ON(mt
, entry
!= NULL
);
1340 MT_BUG_ON(mt
, xa_mk_value(i
) != entry
);
1346 static noinline
void __init
check_prev_entry(struct maple_tree
*mt
)
1348 unsigned long index
= 16;
1352 MA_STATE(mas
, mt
, index
, index
);
1354 MT_BUG_ON(mt
, !mtree_empty(mt
));
1355 check_seq(mt
, 30, false);
1358 value
= mas_find(&mas
, ULONG_MAX
);
1359 MT_BUG_ON(mt
, value
!= xa_mk_value(index
));
1360 value
= mas_prev(&mas
, 0);
1361 MT_BUG_ON(mt
, value
!= xa_mk_value(index
- 1));
1365 /* Check limits on prev */
1366 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1368 for (i
= 0; i
<= index
; i
++) {
1369 mas_set_range(&mas
, i
*10, i
*10+5);
1370 mas_store_gfp(&mas
, xa_mk_value(i
), GFP_KERNEL
);
1374 value
= mas_walk(&mas
);
1375 MT_BUG_ON(mt
, value
!= xa_mk_value(2));
1377 value
= mas_prev(&mas
, 19);
1378 MT_BUG_ON(mt
, value
!= NULL
);
1381 value
= mas_walk(&mas
);
1382 MT_BUG_ON(mt
, value
!= xa_mk_value(8));
1384 value
= mas_prev(&mas
, 76);
1385 MT_BUG_ON(mt
, value
!= NULL
);
1390 static noinline
void __init
check_store_null(struct maple_tree
*mt
)
1392 MA_STATE(mas
, mt
, 0, ULONG_MAX
);
1395 * Store NULL at range [0, ULONG_MAX] to an empty tree should result
1398 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1400 mas_store_gfp(&mas
, NULL
, GFP_KERNEL
);
1401 MT_BUG_ON(mt
, !mtree_empty(mt
));
1406 * Store NULL at any range to an empty tree should result in an empty
1409 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1411 mas_set_range(&mas
, 3, 10);
1412 mas_store_gfp(&mas
, NULL
, GFP_KERNEL
);
1413 MT_BUG_ON(mt
, !mtree_empty(mt
));
1418 * Store NULL at range [0, ULONG_MAX] to a single entry tree should
1419 * result in an empty tree
1421 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1424 mas_store_gfp(&mas
, &mas
, GFP_KERNEL
);
1425 mas_set_range(&mas
, 0, ULONG_MAX
);
1426 mas_store_gfp(&mas
, NULL
, GFP_KERNEL
);
1427 MT_BUG_ON(mt
, !mtree_empty(mt
));
1432 * Store NULL at range [0, n] to a single entry tree should
1433 * result in an empty tree
1435 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1438 mas_store_gfp(&mas
, &mas
, GFP_KERNEL
);
1439 mas_set_range(&mas
, 0, 5);
1440 mas_store_gfp(&mas
, NULL
, GFP_KERNEL
);
1441 MT_BUG_ON(mt
, !mtree_empty(mt
));
1446 * Store NULL at range [m, n] where m > 0 to a single entry tree
1447 * should still be a single entry tree
1449 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1452 mas_store_gfp(&mas
, &mas
, GFP_KERNEL
);
1453 mas_set_range(&mas
, 2, 5);
1454 mas_store_gfp(&mas
, NULL
, GFP_KERNEL
);
1455 MT_BUG_ON(mt
, mtree_empty(mt
));
1456 // MT_BUG_ON(mt, xa_is_node(mas_root(&mas)));
1461 * Store NULL at range [0, ULONG_MAX] to a tree with node should
1462 * result in an empty tree
1464 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1466 mas_set_range(&mas
, 1, 3);
1467 mas_store_gfp(&mas
, &mas
, GFP_KERNEL
);
1468 // MT_BUG_ON(mt, !xa_is_node(mas_root(&mas)));
1469 mas_set_range(&mas
, 0, ULONG_MAX
);
1470 mas_store_gfp(&mas
, NULL
, GFP_KERNEL
);
1471 MT_BUG_ON(mt
, !mtree_empty(mt
));
1476 static noinline
void __init
check_root_expand(struct maple_tree
*mt
)
1478 MA_STATE(mas
, mt
, 0, 0);
1484 ptr
= mas_walk(&mas
);
1485 MT_BUG_ON(mt
, mas
.index
!= 0);
1486 MT_BUG_ON(mt
, ptr
!= NULL
);
1487 MT_BUG_ON(mt
, mas
.index
!= 0);
1488 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
1490 ptr
= &check_prev_entry
;
1492 mas_store_gfp(&mas
, ptr
, GFP_KERNEL
);
1495 ptr
= mas_walk(&mas
);
1496 MT_BUG_ON(mt
, ptr
!= NULL
);
1499 ptr
= mas_walk(&mas
);
1500 MT_BUG_ON(mt
, ptr
!= &check_prev_entry
);
1503 ptr
= mas_walk(&mas
);
1504 MT_BUG_ON(mt
, ptr
!= NULL
);
1509 mt_init_flags(mt
, 0);
1513 ptr
= &check_prev_entry
;
1514 mas_store_gfp(&mas
, ptr
, GFP_KERNEL
);
1517 ptr
= mas_walk(&mas
);
1518 MT_BUG_ON(mt
, ptr
!= NULL
);
1519 MT_BUG_ON(mt
, mas
.index
!= 1);
1520 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
1522 mas_set_range(&mas
, 0, 100);
1523 ptr
= mas_walk(&mas
);
1524 MT_BUG_ON(mt
, ptr
!= &check_prev_entry
);
1525 MT_BUG_ON(mt
, mas
.last
!= 0);
1529 mt_init_flags(mt
, 0);
1533 ptr
= (void *)((unsigned long) check_prev_entry
| 1UL);
1534 mas_store_gfp(&mas
, ptr
, GFP_KERNEL
);
1535 ptr
= mas_next(&mas
, ULONG_MAX
);
1536 MT_BUG_ON(mt
, ptr
!= NULL
);
1537 MT_BUG_ON(mt
, (mas
.index
!= 1) && (mas
.last
!= ULONG_MAX
));
1540 ptr
= mas_prev(&mas
, 0);
1541 MT_BUG_ON(mt
, (mas
.index
!= 0) && (mas
.last
!= 0));
1542 MT_BUG_ON(mt
, ptr
!= (void *)((unsigned long) check_prev_entry
| 1UL));
1548 mt_init_flags(mt
, 0);
1551 ptr
= (void *)((unsigned long) check_prev_entry
| 2UL);
1552 mas_store_gfp(&mas
, ptr
, GFP_KERNEL
);
1553 ptr
= mas_next(&mas
, ULONG_MAX
);
1554 MT_BUG_ON(mt
, ptr
!= NULL
);
1555 MT_BUG_ON(mt
, (mas
.index
!= ULONG_MAX
) && (mas
.last
!= ULONG_MAX
));
1558 ptr
= mas_prev(&mas
, 0);
1559 MT_BUG_ON(mt
, (mas
.index
!= 0) && (mas
.last
!= 0));
1560 MT_BUG_ON(mt
, ptr
!= (void *)((unsigned long) check_prev_entry
| 2UL));
1566 static noinline
void __init
check_gap_combining(struct maple_tree
*mt
)
1568 struct maple_enode
*mn1
, *mn2
;
1570 unsigned long singletons
= 100;
1571 static const unsigned long *seq100
;
1572 static const unsigned long seq100_64
[] = {
1586 static const unsigned long seq100_32
[] = {
1600 static const unsigned long seq2000
[] = {
1604 static const unsigned long seq400
[] = {
1606 256, 260, 266, 270, 275, 280, 290, 398,
1610 unsigned long index
;
1612 MA_STATE(mas
, mt
, 0, 0);
1620 mas_set(&mas
, index
);
1621 MT_BUG_ON(mt
, !mtree_empty(mt
));
1622 check_seq(mt
, singletons
, false); /* create 100 singletons. */
1624 mt_set_non_kernel(1);
1625 mtree_test_erase(mt
, seq100
[2]);
1626 check_load(mt
, seq100
[2], NULL
);
1627 mtree_test_erase(mt
, seq100
[1]);
1628 check_load(mt
, seq100
[1], NULL
);
1631 entry
= mas_find(&mas
, ULONG_MAX
);
1632 MT_BUG_ON(mt
, entry
!= xa_mk_value(index
));
1634 mas_next(&mas
, ULONG_MAX
);
1635 entry
= mas_next(&mas
, ULONG_MAX
);
1636 MT_BUG_ON(mt
, entry
!= xa_mk_value(index
+ 4));
1638 MT_BUG_ON(mt
, mn1
== mn2
); /* test the test. */
1641 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
1642 * seq100[4]. Search for the gap.
1644 mt_set_non_kernel(1);
1646 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, seq100
[3], seq100
[4],
1648 MT_BUG_ON(mt
, mas
.index
!= index
+ 1);
1651 mtree_test_erase(mt
, seq100
[6]);
1652 check_load(mt
, seq100
[6], NULL
);
1653 mtree_test_erase(mt
, seq100
[7]);
1654 check_load(mt
, seq100
[7], NULL
);
1655 mtree_test_erase(mt
, seq100
[8]);
1662 entry
= mas_find(&mas
, ULONG_MAX
);
1663 MT_BUG_ON(mt
, entry
!= xa_mk_value(index
));
1665 entry
= mas_next(&mas
, ULONG_MAX
);
1666 MT_BUG_ON(mt
, entry
!= xa_mk_value(index
+ 4));
1667 mas_next(&mas
, ULONG_MAX
); /* go to the next entry. */
1669 MT_BUG_ON(mt
, mn1
== mn2
); /* test the next entry is in the next node. */
1672 * At this point, there is a gap of 3 at seq100[6]. Find it by
1673 * searching 20 - 50 for size 3.
1676 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, seq100
[10], seq100
[11],
1678 MT_BUG_ON(mt
, mas
.index
!= seq100
[6]);
1681 mt_set_non_kernel(1);
1682 mtree_store(mt
, seq100
[13], NULL
, GFP_KERNEL
);
1683 check_load(mt
, seq100
[13], NULL
);
1684 check_load(mt
, seq100
[14], xa_mk_value(seq100
[14]));
1685 mtree_store(mt
, seq100
[14], NULL
, GFP_KERNEL
);
1686 check_load(mt
, seq100
[13], NULL
);
1687 check_load(mt
, seq100
[14], NULL
);
1691 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, seq100
[16], seq100
[15],
1693 MT_BUG_ON(mt
, mas
.index
!= seq100
[13]);
1698 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
1701 mt_set_non_kernel(2);
1702 mtree_test_store_range(mt
, seq100
[18], seq100
[14], NULL
);
1703 mtree_test_erase(mt
, seq100
[15]);
1706 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, seq100
[16], seq100
[19],
1709 MT_BUG_ON(mt
, mas
.index
!= seq100
[18]);
1713 /* seq 2000 tests are for multi-level tree gaps */
1714 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1715 check_seq(mt
, 2000, false);
1716 mt_set_non_kernel(1);
1717 mtree_test_erase(mt
, seq2000
[0]);
1718 mtree_test_erase(mt
, seq2000
[1]);
1720 mt_set_non_kernel(2);
1723 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, seq2000
[2], seq2000
[3],
1725 MT_BUG_ON(mt
, mas
.index
!= seq2000
[1]);
1730 /* seq 400 tests rebalancing over two levels. */
1731 mt_set_non_kernel(99);
1732 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1733 check_seq(mt
, 400, false);
1734 mtree_test_store_range(mt
, seq400
[0], seq400
[1], NULL
);
1735 mt_set_non_kernel(0);
1738 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
1739 check_seq(mt
, 400, false);
1740 mt_set_non_kernel(50);
1741 mtree_test_store_range(mt
, seq400
[2], seq400
[9],
1742 xa_mk_value(seq400
[2]));
1743 mtree_test_store_range(mt
, seq400
[3], seq400
[9],
1744 xa_mk_value(seq400
[3]));
1745 mtree_test_store_range(mt
, seq400
[4], seq400
[9],
1746 xa_mk_value(seq400
[4]));
1747 mtree_test_store_range(mt
, seq400
[5], seq400
[9],
1748 xa_mk_value(seq400
[5]));
1749 mtree_test_store_range(mt
, seq400
[0], seq400
[9],
1750 xa_mk_value(seq400
[0]));
1751 mtree_test_store_range(mt
, seq400
[6], seq400
[9],
1752 xa_mk_value(seq400
[6]));
1753 mtree_test_store_range(mt
, seq400
[7], seq400
[9],
1754 xa_mk_value(seq400
[7]));
1755 mtree_test_store_range(mt
, seq400
[8], seq400
[9],
1756 xa_mk_value(seq400
[8]));
1757 mtree_test_store_range(mt
, seq400
[10], seq400
[11],
1758 xa_mk_value(seq400
[10]));
1760 mt_set_non_kernel(0);
1763 static noinline
void __init
check_node_overwrite(struct maple_tree
*mt
)
1767 for (i
= 0; i
< max
; i
++)
1768 mtree_test_store_range(mt
, i
*100, i
*100 + 50, xa_mk_value(i
*100));
1770 mtree_test_store_range(mt
, 319951, 367950, NULL
);
1771 /*mt_dump(mt, mt_dump_dec); */
#if defined(BENCH_SLOT_STORE)
/*
 * bench_slot_store() - benchmark repeated single-slot overwrites.
 *
 * Fills the tree with spaced ranges, then repeatedly punches a hole at @brk
 * and re-stores [brk_start, brk], exercising the slot-store fast path
 * @count times.
 */
static noinline void __init bench_slot_store(struct maple_tree *mt)
{
	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				  GFP_KERNEL);
	}
}
#endif
#if defined(BENCH_NODE_STORE)
/*
 * bench_node_store() - benchmark node-level stores.
 *
 * Overwrites a 16-slot range at a sliding offset @count times; the offset
 * advances by 5 and wraps back to 76 once it reaches 135, so the store
 * repeatedly crosses slot boundaries.  NOTE(review): the advance step
 * (overwrite += 5) was elided by the extraction and reconstructed from
 * upstream — confirm.
 */
static noinline void __init bench_node_store(struct maple_tree *mt)
{
	int i, overwrite = 76, max = 240, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, overwrite,  overwrite + 15,
				  xa_mk_value(overwrite), GFP_KERNEL);

		overwrite += 5;
		if (overwrite >= 135)
			overwrite = 76;
	}
}
#endif
#if defined(BENCH_AWALK)
/*
 * bench_awalk() - benchmark reverse empty-area (gap) searches.
 *
 * Creates one gap at 1470-1475 and repeatedly searches [0, 2000] for a gap
 * of size 10, resetting the state between iterations.
 */
static noinline void __init bench_awalk(struct maple_tree *mt)
{
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif
#if defined(BENCH_WALK)
/*
 * bench_walk() - benchmark a repeated mas_walk() to a fixed index.
 */
static noinline void __init bench_walk(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}
}
#endif
#if defined(BENCH_LOAD)
/*
 * bench_load() - benchmark the external mtree_load() lookup path.
 */
static noinline void __init bench_load(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++)
		mtree_load(mt, 1470);
}
#endif
#if defined(BENCH_MT_FOR_EACH)
/*
 * bench_mt_for_each() - benchmark full-tree iteration with mt_for_each().
 *
 * Each pass verifies the iterated values against the stored pattern
 * (entries every 5 indices) and resets @index for the next pass.
 */
static noinline void __init bench_mt_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}

		index = 0;
	}

}
#endif
#if defined(BENCH_MAS_FOR_EACH)
/*
 * bench_mas_for_each() - benchmark advanced-API iteration with
 * mas_for_each().
 *
 * Stores ranges with an occasionally shorter span (gap = 3 every 30th
 * index) so the iterator also crosses small gaps.  NOTE(review): the gap
 * selection lines were elided by the extraction and reconstructed from
 * upstream — confirm.
 */
static noinline void __init bench_mas_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i < max; i += 5) {
		int gap = 4;

		if (i % 30 == 0)
			gap = 3;
		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mas_for_each(&mas, entry, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}
		mas_set(&mas, 0);
	}
	rcu_read_unlock();

}
#endif
#if defined(BENCH_MAS_PREV)
/*
 * bench_mas_prev() - benchmark backwards iteration with mas_prev().
 *
 * Same tree layout as bench_mas_for_each(); each pass walks from ULONG_MAX
 * down to 0, checking values against the stored pattern (j starts at 2495
 * and decreases by 5).
 */
static noinline void __init bench_mas_prev(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i < max; i += 5) {
		int gap = 4;

		if (i % 30 == 0)
			gap = 3;
		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long j = 2495;

		mas_set(&mas, ULONG_MAX);
		while ((entry = mas_prev(&mas, 0)) != NULL) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j -= 5;
		}
	}
	rcu_read_unlock();

}
#endif
1941 /* check_forking - simulate the kernel forking sequence with the tree. */
1942 static noinline
void __init
check_forking(void)
1944 struct maple_tree mt
, newmt
;
1945 int i
, nr_entries
= 134, ret
;
1947 MA_STATE(mas
, &mt
, 0, 0);
1948 MA_STATE(newmas
, &newmt
, 0, 0);
1949 struct rw_semaphore mt_lock
, newmt_lock
;
1951 init_rwsem(&mt_lock
);
1952 init_rwsem(&newmt_lock
);
1954 mt_init_flags(&mt
, MT_FLAGS_ALLOC_RANGE
| MT_FLAGS_LOCK_EXTERN
);
1955 mt_set_external_lock(&mt
, &mt_lock
);
1957 mt_init_flags(&newmt
, MT_FLAGS_ALLOC_RANGE
| MT_FLAGS_LOCK_EXTERN
);
1958 mt_set_external_lock(&newmt
, &newmt_lock
);
1960 down_write(&mt_lock
);
1961 for (i
= 0; i
<= nr_entries
; i
++) {
1962 mas_set_range(&mas
, i
*10, i
*10 + 5);
1963 mas_store_gfp(&mas
, xa_mk_value(i
), GFP_KERNEL
);
1966 down_write_nested(&newmt_lock
, SINGLE_DEPTH_NESTING
);
1967 ret
= __mt_dup(&mt
, &newmt
, GFP_KERNEL
);
1973 mas_set(&newmas
, 0);
1974 mas_for_each(&newmas
, val
, ULONG_MAX
)
1975 mas_store(&newmas
, val
);
1977 mas_destroy(&newmas
);
1979 mt_validate(&newmt
);
1980 __mt_destroy(&newmt
);
1982 up_write(&newmt_lock
);
1986 static noinline
void __init
check_iteration(struct maple_tree
*mt
)
1988 int i
, nr_entries
= 125;
1990 MA_STATE(mas
, mt
, 0, 0);
1992 for (i
= 0; i
<= nr_entries
; i
++)
1993 mtree_store_range(mt
, i
* 10, i
* 10 + 9,
1994 xa_mk_value(i
), GFP_KERNEL
);
1996 mt_set_non_kernel(99999);
2000 mas_for_each(&mas
, val
, 925) {
2001 MT_BUG_ON(mt
, mas
.index
!= i
* 10);
2002 MT_BUG_ON(mt
, mas
.last
!= i
* 10 + 9);
2003 /* Overwrite end of entry 92 */
2007 mas_store(&mas
, val
);
2011 /* Ensure mas_find() gets the next value */
2012 val
= mas_find(&mas
, ULONG_MAX
);
2013 MT_BUG_ON(mt
, val
!= xa_mk_value(i
));
2017 mas_for_each(&mas
, val
, 785) {
2018 MT_BUG_ON(mt
, mas
.index
!= i
* 10);
2019 MT_BUG_ON(mt
, mas
.last
!= i
* 10 + 9);
2020 /* Overwrite start of entry 78 */
2024 mas_store(&mas
, val
);
2029 val
= mas_find(&mas
, ULONG_MAX
);
2030 MT_BUG_ON(mt
, val
!= xa_mk_value(i
));
2034 mas_for_each(&mas
, val
, 765) {
2035 MT_BUG_ON(mt
, mas
.index
!= i
* 10);
2036 MT_BUG_ON(mt
, mas
.last
!= i
* 10 + 9);
2037 /* Overwrite end of entry 76 and advance to the end */
2041 mas_store(&mas
, val
);
2045 /* Make sure the next find returns the one after 765, 766-769 */
2046 val
= mas_find(&mas
, ULONG_MAX
);
2047 MT_BUG_ON(mt
, val
!= xa_mk_value(76));
2050 mt_set_non_kernel(0);
2053 static noinline
void __init
check_mas_store_gfp(struct maple_tree
*mt
)
2056 struct maple_tree newmt
;
2057 int i
, nr_entries
= 135;
2059 MA_STATE(mas
, mt
, 0, 0);
2060 MA_STATE(newmas
, mt
, 0, 0);
2062 for (i
= 0; i
<= nr_entries
; i
++)
2063 mtree_store_range(mt
, i
*10, i
*10 + 5,
2064 xa_mk_value(i
), GFP_KERNEL
);
2066 mt_set_non_kernel(99999);
2067 mt_init_flags(&newmt
, MT_FLAGS_ALLOC_RANGE
);
2068 newmas
.tree
= &newmt
;
2073 mas_for_each(&mas
, val
, ULONG_MAX
) {
2074 newmas
.index
= mas
.index
;
2075 newmas
.last
= mas
.last
;
2076 mas_store_gfp(&newmas
, val
, GFP_KERNEL
);
2078 mas_unlock(&newmas
);
2080 mt_validate(&newmt
);
2081 mt_set_non_kernel(0);
2082 mtree_destroy(&newmt
);
#if defined(BENCH_FORK)
/*
 * bench_forking() - benchmark the fork-style tree duplication @nr_fork
 * times: __mt_dup() plus a touch of every entry, then teardown, per
 * iteration.
 */
static noinline void __init bench_forking(void)
{
	struct maple_tree mt, newmt;
	int i, nr_entries = 134, nr_fork = 80000, ret;
	void *val;
	MA_STATE(mas, &mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);
	struct rw_semaphore mt_lock, newmt_lock;

	init_rwsem(&mt_lock);
	init_rwsem(&newmt_lock);

	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
	mt_set_external_lock(&mt, &mt_lock);

	down_write(&mt_lock);
	for (i = 0; i <= nr_entries; i++) {
		mas_set_range(&mas, i*10, i*10 + 5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	for (i = 0; i < nr_fork; i++) {
		mt_init_flags(&newmt,
			      MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
		mt_set_external_lock(&newmt, &newmt_lock);

		down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
		ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
		if (ret) {
			pr_err("OOM!");
			BUG_ON(1);
		}

		mas_set(&newmas, 0);
		mas_for_each(&newmas, val, ULONG_MAX)
			mas_store(&newmas, val);

		mas_destroy(&newmas);
		mt_validate(&newmt);
		__mt_destroy(&newmt);
		up_write(&newmt_lock);
	}
	mas_destroy(&mas);
	__mt_destroy(&mt);
	up_write(&mt_lock);
}
#endif
2134 static noinline
void __init
next_prev_test(struct maple_tree
*mt
)
2138 MA_STATE(mas
, mt
, 0, 0);
2139 struct maple_enode
*mn
;
2140 static const unsigned long *level2
;
2141 static const unsigned long level2_64
[] = { 707, 1000, 710, 715, 720,
2143 static const unsigned long level2_32
[] = { 1747, 2000, 1750, 1755,
2145 unsigned long last_index
;
2150 last_index
= 0x138e;
2157 for (i
= 0; i
<= nr_entries
; i
++)
2158 mtree_store_range(mt
, i
*10, i
*10 + 5,
2159 xa_mk_value(i
), GFP_KERNEL
);
2162 for (i
= 0; i
<= nr_entries
/ 2; i
++) {
2163 mas_next(&mas
, 1000);
2164 if (mas_is_none(&mas
))
2171 mas_for_each(&mas
, val
, 1000) {
2178 mas_for_each(&mas
, val
, 1000) {
2184 * 680 - 685 = 0x61a00001930c
2186 * 690 - 695 = 0x61a00001930c
2187 * Check simple next/prev
2190 val
= mas_walk(&mas
);
2191 MT_BUG_ON(mt
, val
!= NULL
);
2193 val
= mas_next(&mas
, 1000);
2194 MT_BUG_ON(mt
, val
!= xa_mk_value(690 / 10));
2195 MT_BUG_ON(mt
, mas
.index
!= 690);
2196 MT_BUG_ON(mt
, mas
.last
!= 695);
2198 val
= mas_prev(&mas
, 0);
2199 MT_BUG_ON(mt
, val
!= xa_mk_value(680 / 10));
2200 MT_BUG_ON(mt
, mas
.index
!= 680);
2201 MT_BUG_ON(mt
, mas
.last
!= 685);
2203 val
= mas_next(&mas
, 1000);
2204 MT_BUG_ON(mt
, val
!= xa_mk_value(690 / 10));
2205 MT_BUG_ON(mt
, mas
.index
!= 690);
2206 MT_BUG_ON(mt
, mas
.last
!= 695);
2208 val
= mas_next(&mas
, 1000);
2209 MT_BUG_ON(mt
, val
!= xa_mk_value(700 / 10));
2210 MT_BUG_ON(mt
, mas
.index
!= 700);
2211 MT_BUG_ON(mt
, mas
.last
!= 705);
2213 /* Check across node boundaries of the tree */
2215 val
= mas_walk(&mas
);
2216 MT_BUG_ON(mt
, val
!= xa_mk_value(70 / 10));
2217 MT_BUG_ON(mt
, mas
.index
!= 70);
2218 MT_BUG_ON(mt
, mas
.last
!= 75);
2220 val
= mas_next(&mas
, 1000);
2221 MT_BUG_ON(mt
, val
!= xa_mk_value(80 / 10));
2222 MT_BUG_ON(mt
, mas
.index
!= 80);
2223 MT_BUG_ON(mt
, mas
.last
!= 85);
2225 val
= mas_prev(&mas
, 70);
2226 MT_BUG_ON(mt
, val
!= xa_mk_value(70 / 10));
2227 MT_BUG_ON(mt
, mas
.index
!= 70);
2228 MT_BUG_ON(mt
, mas
.last
!= 75);
2230 /* Check across two levels of the tree */
2232 mas_set(&mas
, level2
[0]);
2233 val
= mas_walk(&mas
);
2234 MT_BUG_ON(mt
, val
!= NULL
);
2235 val
= mas_next(&mas
, level2
[1]);
2236 MT_BUG_ON(mt
, val
!= xa_mk_value(level2
[2] / 10));
2237 MT_BUG_ON(mt
, mas
.index
!= level2
[2]);
2238 MT_BUG_ON(mt
, mas
.last
!= level2
[3]);
2241 val
= mas_next(&mas
, level2
[1]);
2242 MT_BUG_ON(mt
, val
!= xa_mk_value(level2
[4] / 10));
2243 MT_BUG_ON(mt
, mas
.index
!= level2
[4]);
2244 MT_BUG_ON(mt
, mas
.last
!= level2
[5]);
2245 MT_BUG_ON(mt
, mn
== mas
.node
);
2247 val
= mas_prev(&mas
, 0);
2248 MT_BUG_ON(mt
, val
!= xa_mk_value(level2
[2] / 10));
2249 MT_BUG_ON(mt
, mas
.index
!= level2
[2]);
2250 MT_BUG_ON(mt
, mas
.last
!= level2
[3]);
2252 /* Check running off the end and back on */
2253 mas_set(&mas
, nr_entries
* 10);
2254 val
= mas_walk(&mas
);
2255 MT_BUG_ON(mt
, val
!= xa_mk_value(nr_entries
));
2256 MT_BUG_ON(mt
, mas
.index
!= (nr_entries
* 10));
2257 MT_BUG_ON(mt
, mas
.last
!= (nr_entries
* 10 + 5));
2259 val
= mas_next(&mas
, ULONG_MAX
);
2260 MT_BUG_ON(mt
, val
!= NULL
);
2261 MT_BUG_ON(mt
, mas
.index
!= last_index
);
2262 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
2264 val
= mas_prev(&mas
, 0);
2265 MT_BUG_ON(mt
, val
!= xa_mk_value(nr_entries
));
2266 MT_BUG_ON(mt
, mas
.index
!= (nr_entries
* 10));
2267 MT_BUG_ON(mt
, mas
.last
!= (nr_entries
* 10 + 5));
2269 /* Check running off the start and back on */
2272 val
= mas_walk(&mas
);
2273 MT_BUG_ON(mt
, val
!= xa_mk_value(1));
2274 MT_BUG_ON(mt
, mas
.index
!= 10);
2275 MT_BUG_ON(mt
, mas
.last
!= 15);
2277 val
= mas_prev(&mas
, 0);
2278 MT_BUG_ON(mt
, val
!= xa_mk_value(0));
2279 MT_BUG_ON(mt
, mas
.index
!= 0);
2280 MT_BUG_ON(mt
, mas
.last
!= 5);
2282 val
= mas_prev(&mas
, 0);
2283 MT_BUG_ON(mt
, val
!= NULL
);
2284 MT_BUG_ON(mt
, mas
.index
!= 0);
2285 MT_BUG_ON(mt
, mas
.last
!= 5);
2286 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
2290 mas_store(&mas
, NULL
);
2295 val
= mas_prev(&mas
, 0);
2296 MT_BUG_ON(mt
, val
!= NULL
);
2297 MT_BUG_ON(mt
, mas
.index
!= 0);
2298 MT_BUG_ON(mt
, mas
.last
!= 9);
2304 mtree_store_range(mt
, 0, 0, xa_mk_value(0), GFP_KERNEL
);
2305 mtree_store_range(mt
, 5, 5, xa_mk_value(5), GFP_KERNEL
);
2308 val
= mas_prev(&mas
, 4);
2309 MT_BUG_ON(mt
, val
!= NULL
);
2315 /* Test spanning writes that require balancing right sibling or right cousin */
2316 static noinline
void __init
check_spanning_relatives(struct maple_tree
*mt
)
2319 unsigned long i
, nr_entries
= 1000;
2321 for (i
= 0; i
<= nr_entries
; i
++)
2322 mtree_store_range(mt
, i
*10, i
*10 + 5,
2323 xa_mk_value(i
), GFP_KERNEL
);
2326 mtree_store_range(mt
, 9365, 9955, NULL
, GFP_KERNEL
);
2329 static noinline
void __init
check_fuzzer(struct maple_tree
*mt
)
2332 * 1. Causes a spanning rebalance of a single root node.
2333 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2334 * entire right side is consumed.
2336 mtree_test_insert(mt
, 88, (void *)0xb1);
2337 mtree_test_insert(mt
, 84, (void *)0xa9);
2338 mtree_test_insert(mt
, 2, (void *)0x5);
2339 mtree_test_insert(mt
, 4, (void *)0x9);
2340 mtree_test_insert(mt
, 14, (void *)0x1d);
2341 mtree_test_insert(mt
, 7, (void *)0xf);
2342 mtree_test_insert(mt
, 12, (void *)0x19);
2343 mtree_test_insert(mt
, 18, (void *)0x25);
2344 mtree_test_store_range(mt
, 8, 18, (void *)0x11);
2349 * 2. Cause a spanning rebalance of two nodes in root.
2350 * Fixed by setting mast->r->max correctly.
2352 mt_init_flags(mt
, 0);
2353 mtree_test_store(mt
, 87, (void *)0xaf);
2354 mtree_test_store(mt
, 0, (void *)0x1);
2355 mtree_test_load(mt
, 4);
2356 mtree_test_insert(mt
, 4, (void *)0x9);
2357 mtree_test_store(mt
, 8, (void *)0x11);
2358 mtree_test_store(mt
, 44, (void *)0x59);
2359 mtree_test_store(mt
, 68, (void *)0x89);
2360 mtree_test_store(mt
, 2, (void *)0x5);
2361 mtree_test_insert(mt
, 43, (void *)0x57);
2362 mtree_test_insert(mt
, 24, (void *)0x31);
2363 mtree_test_insert(mt
, 844, (void *)0x699);
2364 mtree_test_store(mt
, 84, (void *)0xa9);
2365 mtree_test_store(mt
, 4, (void *)0x9);
2366 mtree_test_erase(mt
, 4);
2367 mtree_test_load(mt
, 5);
2368 mtree_test_erase(mt
, 0);
2372 * 3. Cause a node overflow on copy
2373 * Fixed by using the correct check for node size in mas_wr_modify()
2374 * Also discovered issue with metadata setting.
2376 mt_init_flags(mt
, 0);
2377 mtree_test_store_range(mt
, 0, ULONG_MAX
, (void *)0x1);
2378 mtree_test_store(mt
, 4, (void *)0x9);
2379 mtree_test_erase(mt
, 5);
2380 mtree_test_erase(mt
, 0);
2381 mtree_test_erase(mt
, 4);
2382 mtree_test_store(mt
, 5, (void *)0xb);
2383 mtree_test_erase(mt
, 5);
2384 mtree_test_store(mt
, 5, (void *)0xb);
2385 mtree_test_erase(mt
, 5);
2386 mtree_test_erase(mt
, 4);
2387 mtree_test_store(mt
, 4, (void *)0x9);
2388 mtree_test_store(mt
, 444, (void *)0x379);
2389 mtree_test_store(mt
, 0, (void *)0x1);
2390 mtree_test_load(mt
, 0);
2391 mtree_test_store(mt
, 5, (void *)0xb);
2392 mtree_test_erase(mt
, 0);
2396 * 4. spanning store failure due to writing incorrect pivot value at
2398 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2401 mt_init_flags(mt
, 0);
2402 mtree_test_insert(mt
, 261, (void *)0x20b);
2403 mtree_test_store(mt
, 516, (void *)0x409);
2404 mtree_test_store(mt
, 6, (void *)0xd);
2405 mtree_test_insert(mt
, 5, (void *)0xb);
2406 mtree_test_insert(mt
, 1256, (void *)0x9d1);
2407 mtree_test_store(mt
, 4, (void *)0x9);
2408 mtree_test_erase(mt
, 1);
2409 mtree_test_store(mt
, 56, (void *)0x71);
2410 mtree_test_insert(mt
, 1, (void *)0x3);
2411 mtree_test_store(mt
, 24, (void *)0x31);
2412 mtree_test_erase(mt
, 1);
2413 mtree_test_insert(mt
, 2263, (void *)0x11af);
2414 mtree_test_insert(mt
, 446, (void *)0x37d);
2415 mtree_test_store_range(mt
, 6, 45, (void *)0xd);
2416 mtree_test_store_range(mt
, 3, 446, (void *)0x7);
2420 * 5. mas_wr_extend_null() may overflow slots.
2421 * Fix by checking against wr_mas->node_end.
2423 mt_init_flags(mt
, 0);
2424 mtree_test_store(mt
, 48, (void *)0x61);
2425 mtree_test_store(mt
, 3, (void *)0x7);
2426 mtree_test_load(mt
, 0);
2427 mtree_test_store(mt
, 88, (void *)0xb1);
2428 mtree_test_store(mt
, 81, (void *)0xa3);
2429 mtree_test_insert(mt
, 0, (void *)0x1);
2430 mtree_test_insert(mt
, 8, (void *)0x11);
2431 mtree_test_insert(mt
, 4, (void *)0x9);
2432 mtree_test_insert(mt
, 2480, (void *)0x1361);
2433 mtree_test_insert(mt
, ULONG_MAX
,
2434 (void *)0xffffffffffffffff);
2435 mtree_test_erase(mt
, ULONG_MAX
);
2439 * 6. When reusing a node with an implied pivot and the node is
2440 * shrinking, old data would be left in the implied slot
2441 * Fixed by checking the last pivot for the mas->max and clear
2442 * accordingly. This only affected the left-most node as that node is
2443 * the only one allowed to end in NULL.
2445 mt_init_flags(mt
, 0);
2446 mtree_test_erase(mt
, 3);
2447 mtree_test_insert(mt
, 22, (void *)0x2d);
2448 mtree_test_insert(mt
, 15, (void *)0x1f);
2449 mtree_test_load(mt
, 2);
2450 mtree_test_insert(mt
, 1, (void *)0x3);
2451 mtree_test_insert(mt
, 1, (void *)0x3);
2452 mtree_test_insert(mt
, 5, (void *)0xb);
2453 mtree_test_erase(mt
, 1);
2454 mtree_test_insert(mt
, 1, (void *)0x3);
2455 mtree_test_insert(mt
, 4, (void *)0x9);
2456 mtree_test_insert(mt
, 1, (void *)0x3);
2457 mtree_test_erase(mt
, 1);
2458 mtree_test_insert(mt
, 2, (void *)0x5);
2459 mtree_test_insert(mt
, 1, (void *)0x3);
2460 mtree_test_erase(mt
, 3);
2461 mtree_test_insert(mt
, 22, (void *)0x2d);
2462 mtree_test_insert(mt
, 15, (void *)0x1f);
2463 mtree_test_insert(mt
, 2, (void *)0x5);
2464 mtree_test_insert(mt
, 1, (void *)0x3);
2465 mtree_test_insert(mt
, 8, (void *)0x11);
2466 mtree_test_load(mt
, 2);
2467 mtree_test_insert(mt
, 1, (void *)0x3);
2468 mtree_test_insert(mt
, 1, (void *)0x3);
2469 mtree_test_store(mt
, 1, (void *)0x3);
2470 mtree_test_insert(mt
, 5, (void *)0xb);
2471 mtree_test_erase(mt
, 1);
2472 mtree_test_insert(mt
, 1, (void *)0x3);
2473 mtree_test_insert(mt
, 4, (void *)0x9);
2474 mtree_test_insert(mt
, 1, (void *)0x3);
2475 mtree_test_erase(mt
, 1);
2476 mtree_test_insert(mt
, 2, (void *)0x5);
2477 mtree_test_insert(mt
, 1, (void *)0x3);
2478 mtree_test_erase(mt
, 3);
2479 mtree_test_insert(mt
, 22, (void *)0x2d);
2480 mtree_test_insert(mt
, 15, (void *)0x1f);
2481 mtree_test_insert(mt
, 2, (void *)0x5);
2482 mtree_test_insert(mt
, 1, (void *)0x3);
2483 mtree_test_insert(mt
, 8, (void *)0x11);
2484 mtree_test_insert(mt
, 12, (void *)0x19);
2485 mtree_test_erase(mt
, 1);
2486 mtree_test_store_range(mt
, 4, 62, (void *)0x9);
2487 mtree_test_erase(mt
, 62);
2488 mtree_test_store_range(mt
, 1, 0, (void *)0x3);
2489 mtree_test_insert(mt
, 11, (void *)0x17);
2490 mtree_test_insert(mt
, 3, (void *)0x7);
2491 mtree_test_insert(mt
, 3, (void *)0x7);
2492 mtree_test_store(mt
, 62, (void *)0x7d);
2493 mtree_test_erase(mt
, 62);
2494 mtree_test_store_range(mt
, 1, 15, (void *)0x3);
2495 mtree_test_erase(mt
, 1);
2496 mtree_test_insert(mt
, 22, (void *)0x2d);
2497 mtree_test_insert(mt
, 12, (void *)0x19);
2498 mtree_test_erase(mt
, 1);
2499 mtree_test_insert(mt
, 3, (void *)0x7);
2500 mtree_test_store(mt
, 62, (void *)0x7d);
2501 mtree_test_erase(mt
, 62);
2502 mtree_test_insert(mt
, 122, (void *)0xf5);
2503 mtree_test_store(mt
, 3, (void *)0x7);
2504 mtree_test_insert(mt
, 0, (void *)0x1);
2505 mtree_test_store_range(mt
, 0, 1, (void *)0x1);
2506 mtree_test_insert(mt
, 85, (void *)0xab);
2507 mtree_test_insert(mt
, 72, (void *)0x91);
2508 mtree_test_insert(mt
, 81, (void *)0xa3);
2509 mtree_test_insert(mt
, 726, (void *)0x5ad);
2510 mtree_test_insert(mt
, 0, (void *)0x1);
2511 mtree_test_insert(mt
, 1, (void *)0x3);
2512 mtree_test_store(mt
, 51, (void *)0x67);
2513 mtree_test_insert(mt
, 611, (void *)0x4c7);
2514 mtree_test_insert(mt
, 485, (void *)0x3cb);
2515 mtree_test_insert(mt
, 1, (void *)0x3);
2516 mtree_test_erase(mt
, 1);
2517 mtree_test_insert(mt
, 0, (void *)0x1);
2518 mtree_test_insert(mt
, 1, (void *)0x3);
2519 mtree_test_insert_range(mt
, 26, 1, (void *)0x35);
2520 mtree_test_load(mt
, 1);
2521 mtree_test_store_range(mt
, 1, 22, (void *)0x3);
2522 mtree_test_insert(mt
, 1, (void *)0x3);
2523 mtree_test_erase(mt
, 1);
2524 mtree_test_load(mt
, 53);
2525 mtree_test_load(mt
, 1);
2526 mtree_test_store_range(mt
, 1, 1, (void *)0x3);
2527 mtree_test_insert(mt
, 222, (void *)0x1bd);
2528 mtree_test_insert(mt
, 485, (void *)0x3cb);
2529 mtree_test_insert(mt
, 1, (void *)0x3);
2530 mtree_test_erase(mt
, 1);
2531 mtree_test_load(mt
, 0);
2532 mtree_test_insert(mt
, 21, (void *)0x2b);
2533 mtree_test_insert(mt
, 3, (void *)0x7);
2534 mtree_test_store(mt
, 621, (void *)0x4db);
2535 mtree_test_insert(mt
, 0, (void *)0x1);
2536 mtree_test_erase(mt
, 5);
2537 mtree_test_insert(mt
, 1, (void *)0x3);
2538 mtree_test_store(mt
, 62, (void *)0x7d);
2539 mtree_test_erase(mt
, 62);
2540 mtree_test_store_range(mt
, 1, 0, (void *)0x3);
2541 mtree_test_insert(mt
, 22, (void *)0x2d);
2542 mtree_test_insert(mt
, 12, (void *)0x19);
2543 mtree_test_erase(mt
, 1);
2544 mtree_test_insert(mt
, 1, (void *)0x3);
2545 mtree_test_store_range(mt
, 4, 62, (void *)0x9);
2546 mtree_test_erase(mt
, 62);
2547 mtree_test_erase(mt
, 1);
2548 mtree_test_load(mt
, 1);
2549 mtree_test_store_range(mt
, 1, 22, (void *)0x3);
2550 mtree_test_insert(mt
, 1, (void *)0x3);
2551 mtree_test_erase(mt
, 1);
2552 mtree_test_load(mt
, 53);
2553 mtree_test_load(mt
, 1);
2554 mtree_test_store_range(mt
, 1, 1, (void *)0x3);
2555 mtree_test_insert(mt
, 222, (void *)0x1bd);
2556 mtree_test_insert(mt
, 485, (void *)0x3cb);
2557 mtree_test_insert(mt
, 1, (void *)0x3);
2558 mtree_test_erase(mt
, 1);
2559 mtree_test_insert(mt
, 1, (void *)0x3);
2560 mtree_test_load(mt
, 0);
2561 mtree_test_load(mt
, 0);
2565 * 7. Previous fix was incomplete, fix mas_resuse_node() clearing of old
2566 * data by overwriting it first - that way metadata is of no concern.
2568 mt_init_flags(mt
, 0);
2569 mtree_test_load(mt
, 1);
2570 mtree_test_insert(mt
, 102, (void *)0xcd);
2571 mtree_test_erase(mt
, 2);
2572 mtree_test_erase(mt
, 0);
2573 mtree_test_load(mt
, 0);
2574 mtree_test_insert(mt
, 4, (void *)0x9);
2575 mtree_test_insert(mt
, 2, (void *)0x5);
2576 mtree_test_insert(mt
, 110, (void *)0xdd);
2577 mtree_test_insert(mt
, 1, (void *)0x3);
2578 mtree_test_insert_range(mt
, 5, 0, (void *)0xb);
2579 mtree_test_erase(mt
, 2);
2580 mtree_test_store(mt
, 0, (void *)0x1);
2581 mtree_test_store(mt
, 112, (void *)0xe1);
2582 mtree_test_insert(mt
, 21, (void *)0x2b);
2583 mtree_test_store(mt
, 1, (void *)0x3);
2584 mtree_test_insert_range(mt
, 110, 2, (void *)0xdd);
2585 mtree_test_store(mt
, 2, (void *)0x5);
2586 mtree_test_load(mt
, 22);
2587 mtree_test_erase(mt
, 2);
2588 mtree_test_store(mt
, 210, (void *)0x1a5);
2589 mtree_test_store_range(mt
, 0, 2, (void *)0x1);
2590 mtree_test_store(mt
, 2, (void *)0x5);
2591 mtree_test_erase(mt
, 2);
2592 mtree_test_erase(mt
, 22);
2593 mtree_test_erase(mt
, 1);
2594 mtree_test_erase(mt
, 2);
2595 mtree_test_store(mt
, 0, (void *)0x1);
2596 mtree_test_load(mt
, 112);
2597 mtree_test_insert(mt
, 2, (void *)0x5);
2598 mtree_test_erase(mt
, 2);
2599 mtree_test_store(mt
, 1, (void *)0x3);
2600 mtree_test_insert_range(mt
, 1, 2, (void *)0x3);
2601 mtree_test_erase(mt
, 0);
2602 mtree_test_erase(mt
, 2);
2603 mtree_test_store(mt
, 2, (void *)0x5);
2604 mtree_test_erase(mt
, 0);
2605 mtree_test_erase(mt
, 2);
2606 mtree_test_store(mt
, 0, (void *)0x1);
2607 mtree_test_store(mt
, 0, (void *)0x1);
2608 mtree_test_erase(mt
, 2);
2609 mtree_test_store(mt
, 2, (void *)0x5);
2610 mtree_test_erase(mt
, 2);
2611 mtree_test_insert(mt
, 2, (void *)0x5);
2612 mtree_test_insert_range(mt
, 1, 2, (void *)0x3);
2613 mtree_test_erase(mt
, 0);
2614 mtree_test_erase(mt
, 2);
2615 mtree_test_store(mt
, 0, (void *)0x1);
2616 mtree_test_load(mt
, 112);
2617 mtree_test_store_range(mt
, 110, 12, (void *)0xdd);
2618 mtree_test_store(mt
, 2, (void *)0x5);
2619 mtree_test_load(mt
, 110);
2620 mtree_test_insert_range(mt
, 4, 71, (void *)0x9);
2621 mtree_test_load(mt
, 2);
2622 mtree_test_store(mt
, 2, (void *)0x5);
2623 mtree_test_insert_range(mt
, 11, 22, (void *)0x17);
2624 mtree_test_erase(mt
, 12);
2625 mtree_test_store(mt
, 2, (void *)0x5);
2626 mtree_test_load(mt
, 22);
2631 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2632 * may be set incorrectly to the final pivot and not the right max.
2633 * Fix by setting the left max to orig right max if the entire node is
2636 mt_init_flags(mt
, 0);
2637 mtree_test_store(mt
, 6, (void *)0xd);
2638 mtree_test_store(mt
, 67, (void *)0x87);
2639 mtree_test_insert(mt
, 15, (void *)0x1f);
2640 mtree_test_insert(mt
, 6716, (void *)0x3479);
2641 mtree_test_store(mt
, 61, (void *)0x7b);
2642 mtree_test_insert(mt
, 13, (void *)0x1b);
2643 mtree_test_store(mt
, 8, (void *)0x11);
2644 mtree_test_insert(mt
, 1, (void *)0x3);
2645 mtree_test_load(mt
, 0);
2646 mtree_test_erase(mt
, 67167);
2647 mtree_test_insert_range(mt
, 6, 7167, (void *)0xd);
2648 mtree_test_insert(mt
, 6, (void *)0xd);
2649 mtree_test_erase(mt
, 67);
2650 mtree_test_insert(mt
, 1, (void *)0x3);
2651 mtree_test_erase(mt
, 667167);
2652 mtree_test_insert(mt
, 6, (void *)0xd);
2653 mtree_test_store(mt
, 67, (void *)0x87);
2654 mtree_test_insert(mt
, 5, (void *)0xb);
2655 mtree_test_erase(mt
, 1);
2656 mtree_test_insert(mt
, 6, (void *)0xd);
2657 mtree_test_erase(mt
, 67);
2658 mtree_test_insert(mt
, 15, (void *)0x1f);
2659 mtree_test_insert(mt
, 67167, (void *)0x20cbf);
2660 mtree_test_insert(mt
, 1, (void *)0x3);
2661 mtree_test_load(mt
, 7);
2662 mtree_test_insert(mt
, 16, (void *)0x21);
2663 mtree_test_insert(mt
, 36, (void *)0x49);
2664 mtree_test_store(mt
, 67, (void *)0x87);
2665 mtree_test_store(mt
, 6, (void *)0xd);
2666 mtree_test_insert(mt
, 367, (void *)0x2df);
2667 mtree_test_insert(mt
, 115, (void *)0xe7);
2668 mtree_test_store(mt
, 0, (void *)0x1);
2669 mtree_test_store_range(mt
, 1, 3, (void *)0x3);
2670 mtree_test_store(mt
, 1, (void *)0x3);
2671 mtree_test_erase(mt
, 67167);
2672 mtree_test_insert_range(mt
, 6, 47, (void *)0xd);
2673 mtree_test_store(mt
, 1, (void *)0x3);
2674 mtree_test_insert_range(mt
, 1, 67, (void *)0x3);
2675 mtree_test_load(mt
, 67);
2676 mtree_test_insert(mt
, 1, (void *)0x3);
2677 mtree_test_erase(mt
, 67167);
2681 * 9. spanning store to the end of data caused an invalid metadata
2682 * length which resulted in a crash eventually.
2683 * Fix by checking if there is a value in pivot before incrementing the
2684 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2685 * abstract the two locations this happens into a function called
2686 * mas_leaf_set_meta().
2688 mt_init_flags(mt
, 0);
2689 mtree_test_insert(mt
, 21, (void *)0x2b);
2690 mtree_test_insert(mt
, 12, (void *)0x19);
2691 mtree_test_insert(mt
, 6, (void *)0xd);
2692 mtree_test_insert(mt
, 8, (void *)0x11);
2693 mtree_test_insert(mt
, 2, (void *)0x5);
2694 mtree_test_insert(mt
, 91, (void *)0xb7);
2695 mtree_test_insert(mt
, 18, (void *)0x25);
2696 mtree_test_insert(mt
, 81, (void *)0xa3);
2697 mtree_test_store_range(mt
, 0, 128, (void *)0x1);
2698 mtree_test_store(mt
, 1, (void *)0x3);
2699 mtree_test_erase(mt
, 8);
2700 mtree_test_insert(mt
, 11, (void *)0x17);
2701 mtree_test_insert(mt
, 8, (void *)0x11);
2702 mtree_test_insert(mt
, 21, (void *)0x2b);
2703 mtree_test_insert(mt
, 2, (void *)0x5);
2704 mtree_test_insert(mt
, ULONG_MAX
- 10, (void *)0xffffffffffffffeb);
2705 mtree_test_erase(mt
, ULONG_MAX
- 10);
2706 mtree_test_store_range(mt
, 0, 281, (void *)0x1);
2707 mtree_test_erase(mt
, 2);
2708 mtree_test_insert(mt
, 1211, (void *)0x977);
2709 mtree_test_insert(mt
, 111, (void *)0xdf);
2710 mtree_test_insert(mt
, 13, (void *)0x1b);
2711 mtree_test_insert(mt
, 211, (void *)0x1a7);
2712 mtree_test_insert(mt
, 11, (void *)0x17);
2713 mtree_test_insert(mt
, 5, (void *)0xb);
2714 mtree_test_insert(mt
, 1218, (void *)0x985);
2715 mtree_test_insert(mt
, 61, (void *)0x7b);
2716 mtree_test_store(mt
, 1, (void *)0x3);
2717 mtree_test_insert(mt
, 121, (void *)0xf3);
2718 mtree_test_insert(mt
, 8, (void *)0x11);
2719 mtree_test_insert(mt
, 21, (void *)0x2b);
2720 mtree_test_insert(mt
, 2, (void *)0x5);
2721 mtree_test_insert(mt
, ULONG_MAX
- 10, (void *)0xffffffffffffffeb);
2722 mtree_test_erase(mt
, ULONG_MAX
- 10);
2725 /* duplicate the tree with a specific gap */
2726 static noinline
void __init
check_dup_gaps(struct maple_tree
*mt
,
2727 unsigned long nr_entries
, bool zero_start
,
2730 unsigned long i
= 0;
2731 struct maple_tree newmt
;
2734 MA_STATE(mas
, mt
, 0, 0);
2735 MA_STATE(newmas
, &newmt
, 0, 0);
2736 struct rw_semaphore newmt_lock
;
2738 init_rwsem(&newmt_lock
);
2739 mt_set_external_lock(&newmt
, &newmt_lock
);
2744 mt_zero_nr_tallocated();
2745 for (; i
<= nr_entries
; i
++)
2746 mtree_store_range(mt
, i
*10, (i
+1)*10 - gap
,
2747 xa_mk_value(i
), GFP_KERNEL
);
2749 mt_init_flags(&newmt
, MT_FLAGS_ALLOC_RANGE
| MT_FLAGS_LOCK_EXTERN
);
2750 mt_set_non_kernel(99999);
2751 down_write(&newmt_lock
);
2752 ret
= mas_expected_entries(&newmas
, nr_entries
);
2753 mt_set_non_kernel(0);
2754 MT_BUG_ON(mt
, ret
!= 0);
2757 mas_for_each(&mas
, tmp
, ULONG_MAX
) {
2758 newmas
.index
= mas
.index
;
2759 newmas
.last
= mas
.last
;
2760 mas_store(&newmas
, tmp
);
2763 mas_destroy(&newmas
);
2765 __mt_destroy(&newmt
);
2766 up_write(&newmt_lock
);
2769 /* Duplicate many sizes of trees. Mainly to test expected entry values */
2770 static noinline
void __init
check_dup(struct maple_tree
*mt
)
2773 int big_start
= 100010;
2775 /* Check with a value at zero */
2776 for (i
= 10; i
< 1000; i
++) {
2777 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
2778 check_dup_gaps(mt
, i
, true, 5);
2785 /* Check with a value at zero, no gap */
2786 for (i
= 1000; i
< 2000; i
++) {
2787 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
2788 check_dup_gaps(mt
, i
, true, 0);
2795 /* Check with a value at zero and unreasonably large */
2796 for (i
= big_start
; i
< big_start
+ 10; i
++) {
2797 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
2798 check_dup_gaps(mt
, i
, true, 5);
2805 /* Small to medium size not starting at zero*/
2806 for (i
= 200; i
< 1000; i
++) {
2807 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
2808 check_dup_gaps(mt
, i
, false, 5);
2815 /* Unreasonably large not starting at zero*/
2816 for (i
= big_start
; i
< big_start
+ 10; i
++) {
2817 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
2818 check_dup_gaps(mt
, i
, false, 5);
2825 /* Check non-allocation tree not starting at zero */
2826 for (i
= 1500; i
< 3000; i
++) {
2827 mt_init_flags(mt
, 0);
2828 check_dup_gaps(mt
, i
, false, 5);
2837 /* Check non-allocation tree starting at zero */
2838 for (i
= 200; i
< 1000; i
++) {
2839 mt_init_flags(mt
, 0);
2840 check_dup_gaps(mt
, i
, true, 5);
2847 /* Unreasonably large */
2848 for (i
= big_start
+ 5; i
< big_start
+ 10; i
++) {
2849 mt_init_flags(mt
, 0);
2850 check_dup_gaps(mt
, i
, true, 5);
2858 static noinline
void __init
check_bnode_min_spanning(struct maple_tree
*mt
)
2861 MA_STATE(mas
, mt
, 0, 0);
2863 mt_set_non_kernel(9999);
2866 mas_set_range(&mas
, i
*10, i
*10+9);
2867 mas_store(&mas
, check_bnode_min_spanning
);
2870 mas_set_range(&mas
, 240, 509);
2871 mas_store(&mas
, NULL
);
2874 mt_set_non_kernel(0);
2877 static noinline
void __init
check_empty_area_window(struct maple_tree
*mt
)
2879 unsigned long i
, nr_entries
= 20;
2880 MA_STATE(mas
, mt
, 0, 0);
2882 for (i
= 1; i
<= nr_entries
; i
++)
2883 mtree_store_range(mt
, i
*10, i
*10 + 9,
2884 xa_mk_value(i
), GFP_KERNEL
);
2886 /* Create another hole besides the one at 0 */
2887 mtree_store_range(mt
, 160, 169, NULL
, GFP_KERNEL
);
2889 /* Check lower bounds that don't fit */
2891 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 5, 90, 10) != -EBUSY
);
2894 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 6, 90, 5) != -EBUSY
);
2896 /* Check lower bound that does fit */
2898 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 5, 90, 5) != 0);
2899 MT_BUG_ON(mt
, mas
.index
!= 5);
2900 MT_BUG_ON(mt
, mas
.last
!= 9);
2903 /* Check one gap that doesn't fit and one that does */
2906 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 5, 217, 9) != 0);
2907 MT_BUG_ON(mt
, mas
.index
!= 161);
2908 MT_BUG_ON(mt
, mas
.last
!= 169);
2910 /* Check one gap that does fit above the min */
2912 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 100, 218, 3) != 0);
2913 MT_BUG_ON(mt
, mas
.index
!= 216);
2914 MT_BUG_ON(mt
, mas
.last
!= 218);
2916 /* Check size that doesn't fit any gap */
2918 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 100, 218, 16) != -EBUSY
);
2921 * Check size that doesn't fit the lower end of the window but
2925 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 167, 200, 4) != -EBUSY
);
2928 * Check size that doesn't fit the upper end of the window but
2932 MT_BUG_ON(mt
, mas_empty_area_rev(&mas
, 100, 162, 4) != -EBUSY
);
2934 /* Check mas_empty_area forward */
2936 MT_BUG_ON(mt
, mas_empty_area(&mas
, 0, 100, 9) != 0);
2937 MT_BUG_ON(mt
, mas
.index
!= 0);
2938 MT_BUG_ON(mt
, mas
.last
!= 8);
2941 MT_BUG_ON(mt
, mas_empty_area(&mas
, 0, 100, 4) != 0);
2942 MT_BUG_ON(mt
, mas
.index
!= 0);
2943 MT_BUG_ON(mt
, mas
.last
!= 3);
2946 MT_BUG_ON(mt
, mas_empty_area(&mas
, 0, 100, 11) != -EBUSY
);
2949 MT_BUG_ON(mt
, mas_empty_area(&mas
, 5, 100, 6) != -EBUSY
);
2952 MT_BUG_ON(mt
, mas_empty_area(&mas
, 0, 8, 10) != -EINVAL
);
2955 mas_empty_area(&mas
, 100, 165, 3);
2958 MT_BUG_ON(mt
, mas_empty_area(&mas
, 100, 163, 6) != -EBUSY
);
2962 static noinline
void __init
check_empty_area_fill(struct maple_tree
*mt
)
2964 const unsigned long max
= 0x25D78000;
2967 MA_STATE(mas
, mt
, 0, 0);
2969 mt_set_non_kernel(99999);
2970 for (shift
= 12; shift
<= 16; shift
++) {
2976 MT_BUG_ON(mt
, mas_empty_area(&mas
, 0, max
, size
) != 0);
2977 MT_BUG_ON(mt
, mas
.last
!= mas
.index
+ size
- 1);
2978 mas_store_gfp(&mas
, (void *)size
, GFP_KERNEL
);
2984 /* No space left. */
2987 MT_BUG_ON(mt
, mas_empty_area(&mas
, 0, max
, size
) != -EBUSY
);
2990 /* Fill a depth 3 node to the maximum */
2991 for (unsigned long i
= 629440511; i
<= 629440800; i
+= 6)
2992 mtree_store_range(mt
, i
, i
+ 5, (void *)i
, GFP_KERNEL
);
2993 /* Make space in the second-last depth 4 node */
2994 mtree_erase(mt
, 631668735);
2995 /* Make space in the last depth 4 node */
2996 mtree_erase(mt
, 629506047);
2998 /* Search from just after the gap in the second-last depth 4 */
3000 MT_BUG_ON(mt
, mas_empty_area(&mas
, 629506048, 690000000, 0x5000) != 0);
3002 mt_set_non_kernel(0);
3006 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
3008 * The table below shows the single entry tree (0-0 pointer) and normal tree
3011 * Function ENTRY Start Result index & last
3012 * ┬ ┬ ┬ ┬ ┬
3013 * │ │ │ │ └─ the final range
3014 * │ │ │ └─ The node value after execution
3015 * │ │ └─ The node value before execution
3016 * │ └─ If the entry exists or does not exists (DNE)
3017 * └─ The function name
3019 * Function ENTRY Start Result index & last
3022 * Single entry tree at 0-0
3023 * ------------------------
3024 * DNE MAS_START MAS_NONE 1 - oo
3025 * DNE MAS_PAUSE MAS_NONE 1 - oo
3026 * DNE MAS_ROOT MAS_NONE 1 - oo
3028 * DNE MAS_NONE MAS_ROOT 0
3030 * DNE MAS_NONE MAS_NONE 1 - oo
3034 * exists MAS_START active range
3035 * DNE MAS_START active set to last range
3036 * exists MAS_PAUSE active range
3037 * DNE MAS_PAUSE active set to last range
3038 * exists MAS_NONE active range
3039 * exists active active range
3040 * DNE active active set to last range
3041 * ERANGE active MAS_OVERFLOW last range
3043 * Function ENTRY Start Result index & last
3046 * Single entry tree at 0-0
3047 * ------------------------
3049 * exists MAS_START MAS_ROOT 0
3050 * exists MAS_PAUSE MAS_ROOT 0
3051 * exists MAS_NONE MAS_ROOT 0
3054 * DNE MAS_START MAS_NONE 0
3055 * DNE MAS_PAUSE MAS_NONE 0
3056 * DNE MAS_NONE MAS_NONE 0
3057 * DNE MAS_ROOT MAS_NONE 0
3061 * exists MAS_START active range
3062 * DNE MAS_START active set to min
3063 * exists MAS_PAUSE active range
3064 * DNE MAS_PAUSE active set to min
3065 * exists MAS_NONE active range
3066 * DNE MAS_NONE MAS_NONE set to min
3067 * any MAS_ROOT MAS_NONE 0
3068 * exists active active range
3069 * DNE active active last range
3070 * ERANGE active MAS_UNDERFLOW last range
3072 * Function ENTRY Start Result index & last
3074 * - at index or next
3075 * Single entry tree at 0-0
3076 * ------------------------
3078 * DNE MAS_START MAS_NONE 0
3079 * DNE MAS_PAUSE MAS_NONE 0
3080 * DNE MAS_ROOT MAS_NONE 0
3081 * DNE MAS_NONE MAS_NONE 1
3083 * exists MAS_START MAS_ROOT 0
3084 * exists MAS_PAUSE MAS_ROOT 0
3085 * exists MAS_NONE MAS_ROOT 0
3089 * exists MAS_START active range
3090 * DNE MAS_START active set to max
3091 * exists MAS_PAUSE active range
3092 * DNE MAS_PAUSE active set to max
3093 * exists MAS_NONE active range (start at last)
3094 * exists active active range
3095 * DNE active active last range (max < last)
3097 * Function ENTRY Start Result index & last
3099 * - at index or before
3100 * Single entry tree at 0-0
3101 * ------------------------
3103 * exists MAS_START MAS_ROOT 0
3104 * exists MAS_PAUSE MAS_ROOT 0
3105 * exists MAS_NONE MAS_ROOT 0
3107 * DNE MAS_START MAS_NONE 0
3108 * DNE MAS_PAUSE MAS_NONE 0
3109 * DNE MAS_NONE MAS_NONE 0
3110 * DNE MAS_ROOT MAS_NONE 0
3114 * exists MAS_START active range
3115 * DNE MAS_START active set to min
3116 * exists MAS_PAUSE active range
3117 * DNE MAS_PAUSE active set to min
3118 * exists MAS_NONE active range (start at index)
3119 * exists active active range
3120 * DNE active active last range (min > index)
3122 * Function ENTRY Start Result index & last
3125 * Single entry tree at 0-0
3126 * ------------------------
3128 * DNE MAS_START MAS_ROOT 1 - oo
3129 * DNE MAS_PAUSE MAS_ROOT 1 - oo
3130 * DNE MAS_NONE MAS_ROOT 1 - oo
3131 * DNE MAS_ROOT MAS_ROOT 1 - oo
3133 * exists MAS_START MAS_ROOT 0
3134 * exists MAS_PAUSE MAS_ROOT 0
3135 * exists MAS_NONE MAS_ROOT 0
3136 * exists MAS_ROOT MAS_ROOT 0
3140 * exists MAS_START active range
3141 * DNE MAS_START active range of NULL
3142 * exists MAS_PAUSE active range
3143 * DNE MAS_PAUSE active range of NULL
3144 * exists MAS_NONE active range
3145 * DNE MAS_NONE active range of NULL
3146 * exists active active range
3147 * DNE active active range of NULL
3150 static noinline
void __init
check_state_handling(struct maple_tree
*mt
)
3152 MA_STATE(mas
, mt
, 0, 0);
3153 void *entry
, *ptr
= (void *) 0x1234500;
3157 /* Check MAS_ROOT First */
3158 mtree_store_range(mt
, 0, 0, ptr
, GFP_KERNEL
);
3161 /* prev: Start -> underflow*/
3162 entry
= mas_prev(&mas
, 0);
3163 MT_BUG_ON(mt
, entry
!= NULL
);
3164 MT_BUG_ON(mt
, mas
.status
!= ma_underflow
);
3166 /* prev: Start -> root */
3168 entry
= mas_prev(&mas
, 0);
3169 MT_BUG_ON(mt
, entry
!= ptr
);
3170 MT_BUG_ON(mt
, mas
.index
!= 0);
3171 MT_BUG_ON(mt
, mas
.last
!= 0);
3172 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3174 /* prev: pause -> root */
3177 entry
= mas_prev(&mas
, 0);
3178 MT_BUG_ON(mt
, entry
!= ptr
);
3179 MT_BUG_ON(mt
, mas
.index
!= 0);
3180 MT_BUG_ON(mt
, mas
.last
!= 0);
3181 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3183 /* next: start -> none */
3185 entry
= mas_next(&mas
, ULONG_MAX
);
3186 MT_BUG_ON(mt
, mas
.index
!= 1);
3187 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3188 MT_BUG_ON(mt
, entry
!= NULL
);
3189 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3191 /* next: start -> none*/
3193 entry
= mas_next(&mas
, ULONG_MAX
);
3194 MT_BUG_ON(mt
, mas
.index
!= 1);
3195 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3196 MT_BUG_ON(mt
, entry
!= NULL
);
3197 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3199 /* find: start -> root */
3201 entry
= mas_find(&mas
, ULONG_MAX
);
3202 MT_BUG_ON(mt
, entry
!= ptr
);
3203 MT_BUG_ON(mt
, mas
.index
!= 0);
3204 MT_BUG_ON(mt
, mas
.last
!= 0);
3205 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3207 /* find: root -> none */
3208 entry
= mas_find(&mas
, ULONG_MAX
);
3209 MT_BUG_ON(mt
, entry
!= NULL
);
3210 MT_BUG_ON(mt
, mas
.index
!= 1);
3211 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3212 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3214 /* find: none -> none */
3215 entry
= mas_find(&mas
, ULONG_MAX
);
3216 MT_BUG_ON(mt
, entry
!= NULL
);
3217 MT_BUG_ON(mt
, mas
.index
!= 1);
3218 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3219 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3221 /* find: start -> none */
3223 entry
= mas_find(&mas
, ULONG_MAX
);
3224 MT_BUG_ON(mt
, entry
!= NULL
);
3225 MT_BUG_ON(mt
, mas
.index
!= 1);
3226 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3227 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3229 /* find_rev: none -> root */
3230 entry
= mas_find_rev(&mas
, 0);
3231 MT_BUG_ON(mt
, entry
!= ptr
);
3232 MT_BUG_ON(mt
, mas
.index
!= 0);
3233 MT_BUG_ON(mt
, mas
.last
!= 0);
3234 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3236 /* find_rev: start -> root */
3238 entry
= mas_find_rev(&mas
, 0);
3239 MT_BUG_ON(mt
, entry
!= ptr
);
3240 MT_BUG_ON(mt
, mas
.index
!= 0);
3241 MT_BUG_ON(mt
, mas
.last
!= 0);
3242 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3244 /* find_rev: root -> none */
3245 entry
= mas_find_rev(&mas
, 0);
3246 MT_BUG_ON(mt
, entry
!= NULL
);
3247 MT_BUG_ON(mt
, mas
.index
!= 0);
3248 MT_BUG_ON(mt
, mas
.last
!= 0);
3249 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3251 /* find_rev: none -> none */
3252 entry
= mas_find_rev(&mas
, 0);
3253 MT_BUG_ON(mt
, entry
!= NULL
);
3254 MT_BUG_ON(mt
, mas
.index
!= 0);
3255 MT_BUG_ON(mt
, mas
.last
!= 0);
3256 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3258 /* find_rev: start -> root */
3260 entry
= mas_find_rev(&mas
, 0);
3261 MT_BUG_ON(mt
, entry
!= ptr
);
3262 MT_BUG_ON(mt
, mas
.index
!= 0);
3263 MT_BUG_ON(mt
, mas
.last
!= 0);
3264 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3266 /* walk: start -> none */
3268 entry
= mas_walk(&mas
);
3269 MT_BUG_ON(mt
, entry
!= NULL
);
3270 MT_BUG_ON(mt
, mas
.index
!= 1);
3271 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3272 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3274 /* walk: pause -> none*/
3277 entry
= mas_walk(&mas
);
3278 MT_BUG_ON(mt
, entry
!= NULL
);
3279 MT_BUG_ON(mt
, mas
.index
!= 1);
3280 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3281 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3283 /* walk: none -> none */
3284 mas
.index
= mas
.last
= 10;
3285 entry
= mas_walk(&mas
);
3286 MT_BUG_ON(mt
, entry
!= NULL
);
3287 MT_BUG_ON(mt
, mas
.index
!= 1);
3288 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3289 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3291 /* walk: none -> none */
3292 entry
= mas_walk(&mas
);
3293 MT_BUG_ON(mt
, entry
!= NULL
);
3294 MT_BUG_ON(mt
, mas
.index
!= 1);
3295 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3296 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3298 /* walk: start -> root */
3300 entry
= mas_walk(&mas
);
3301 MT_BUG_ON(mt
, entry
!= ptr
);
3302 MT_BUG_ON(mt
, mas
.index
!= 0);
3303 MT_BUG_ON(mt
, mas
.last
!= 0);
3304 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3306 /* walk: pause -> root */
3309 entry
= mas_walk(&mas
);
3310 MT_BUG_ON(mt
, entry
!= ptr
);
3311 MT_BUG_ON(mt
, mas
.index
!= 0);
3312 MT_BUG_ON(mt
, mas
.last
!= 0);
3313 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3315 /* walk: none -> root */
3316 mas
.status
= ma_none
;
3317 entry
= mas_walk(&mas
);
3318 MT_BUG_ON(mt
, entry
!= ptr
);
3319 MT_BUG_ON(mt
, mas
.index
!= 0);
3320 MT_BUG_ON(mt
, mas
.last
!= 0);
3321 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3323 /* walk: root -> root */
3324 entry
= mas_walk(&mas
);
3325 MT_BUG_ON(mt
, entry
!= ptr
);
3326 MT_BUG_ON(mt
, mas
.index
!= 0);
3327 MT_BUG_ON(mt
, mas
.last
!= 0);
3328 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3330 /* walk: root -> none */
3332 entry
= mas_walk(&mas
);
3333 MT_BUG_ON(mt
, entry
!= NULL
);
3334 MT_BUG_ON(mt
, mas
.index
!= 1);
3335 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3336 MT_BUG_ON(mt
, mas
.status
!= ma_none
);
3338 /* walk: none -> root */
3339 mas
.index
= mas
.last
= 0;
3340 entry
= mas_walk(&mas
);
3341 MT_BUG_ON(mt
, entry
!= ptr
);
3342 MT_BUG_ON(mt
, mas
.index
!= 0);
3343 MT_BUG_ON(mt
, mas
.last
!= 0);
3344 MT_BUG_ON(mt
, mas
.status
!= ma_root
);
3348 /* Check when there is an actual node */
3349 mtree_store_range(mt
, 0, 0, NULL
, GFP_KERNEL
);
3350 mtree_store_range(mt
, 0x1000, 0x1500, ptr
, GFP_KERNEL
);
3351 mtree_store_range(mt
, 0x2000, 0x2500, ptr2
, GFP_KERNEL
);
3352 mtree_store_range(mt
, 0x3000, 0x3500, ptr3
, GFP_KERNEL
);
3356 /* next: start ->active */
3358 entry
= mas_next(&mas
, ULONG_MAX
);
3359 MT_BUG_ON(mt
, entry
!= ptr
);
3360 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3361 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3362 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3364 /* next: pause ->active */
3367 entry
= mas_next(&mas
, ULONG_MAX
);
3368 MT_BUG_ON(mt
, entry
!= ptr
);
3369 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3370 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3371 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3373 /* next: none ->active */
3374 mas
.index
= mas
.last
= 0;
3376 mas
.status
= ma_none
;
3377 entry
= mas_next(&mas
, ULONG_MAX
);
3378 MT_BUG_ON(mt
, entry
!= ptr
);
3379 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3380 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3381 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3383 /* next:active ->active (spanning limit) */
3384 entry
= mas_next(&mas
, 0x2100);
3385 MT_BUG_ON(mt
, entry
!= ptr2
);
3386 MT_BUG_ON(mt
, mas
.index
!= 0x2000);
3387 MT_BUG_ON(mt
, mas
.last
!= 0x2500);
3388 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3390 /* next:active -> overflow (limit reached) beyond data */
3391 entry
= mas_next(&mas
, 0x2999);
3392 MT_BUG_ON(mt
, entry
!= NULL
);
3393 MT_BUG_ON(mt
, mas
.index
!= 0x2501);
3394 MT_BUG_ON(mt
, mas
.last
!= 0x2fff);
3395 MT_BUG_ON(mt
, !mas_is_overflow(&mas
));
3397 /* next:overflow -> active (limit changed) */
3398 entry
= mas_next(&mas
, ULONG_MAX
);
3399 MT_BUG_ON(mt
, entry
!= ptr3
);
3400 MT_BUG_ON(mt
, mas
.index
!= 0x3000);
3401 MT_BUG_ON(mt
, mas
.last
!= 0x3500);
3402 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3404 /* next:active -> overflow (limit reached) */
3405 entry
= mas_next(&mas
, ULONG_MAX
);
3406 MT_BUG_ON(mt
, entry
!= NULL
);
3407 MT_BUG_ON(mt
, mas
.index
!= 0x3501);
3408 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3409 MT_BUG_ON(mt
, !mas_is_overflow(&mas
));
3411 /* next:overflow -> overflow */
3412 entry
= mas_next(&mas
, ULONG_MAX
);
3413 MT_BUG_ON(mt
, entry
!= NULL
);
3414 MT_BUG_ON(mt
, mas
.index
!= 0x3501);
3415 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3416 MT_BUG_ON(mt
, !mas_is_overflow(&mas
));
3418 /* prev:overflow -> active */
3419 entry
= mas_prev(&mas
, 0);
3420 MT_BUG_ON(mt
, entry
!= ptr3
);
3421 MT_BUG_ON(mt
, mas
.index
!= 0x3000);
3422 MT_BUG_ON(mt
, mas
.last
!= 0x3500);
3423 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3425 /* next: none -> active, skip value at location */
3427 entry
= mas_next(&mas
, ULONG_MAX
);
3428 mas
.status
= ma_none
;
3430 entry
= mas_next(&mas
, ULONG_MAX
);
3431 MT_BUG_ON(mt
, entry
!= ptr2
);
3432 MT_BUG_ON(mt
, mas
.index
!= 0x2000);
3433 MT_BUG_ON(mt
, mas
.last
!= 0x2500);
3434 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3436 /* prev:active ->active */
3437 entry
= mas_prev(&mas
, 0);
3438 MT_BUG_ON(mt
, entry
!= ptr
);
3439 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3440 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3441 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3443 /* prev:active -> underflow (span limit) */
3444 mas_next(&mas
, ULONG_MAX
);
3445 entry
= mas_prev(&mas
, 0x1200);
3446 MT_BUG_ON(mt
, entry
!= ptr
);
3447 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3448 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3449 MT_BUG_ON(mt
, !mas_is_active(&mas
)); /* spanning limit */
3450 entry
= mas_prev(&mas
, 0x1200); /* underflow */
3451 MT_BUG_ON(mt
, entry
!= NULL
);
3452 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3453 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3454 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
3456 /* prev:underflow -> underflow (lower limit) spanning end range */
3457 entry
= mas_prev(&mas
, 0x0100);
3458 MT_BUG_ON(mt
, entry
!= NULL
);
3459 MT_BUG_ON(mt
, mas
.index
!= 0);
3460 MT_BUG_ON(mt
, mas
.last
!= 0x0FFF);
3461 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
3463 /* prev:underflow -> underflow */
3464 entry
= mas_prev(&mas
, 0);
3465 MT_BUG_ON(mt
, entry
!= NULL
);
3466 MT_BUG_ON(mt
, mas
.index
!= 0);
3467 MT_BUG_ON(mt
, mas
.last
!= 0x0FFF);
3468 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
3470 /* prev:underflow -> underflow */
3471 entry
= mas_prev(&mas
, 0);
3472 MT_BUG_ON(mt
, entry
!= NULL
);
3473 MT_BUG_ON(mt
, mas
.index
!= 0);
3474 MT_BUG_ON(mt
, mas
.last
!= 0x0FFF);
3475 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
3477 /* next:underflow -> active */
3478 entry
= mas_next(&mas
, ULONG_MAX
);
3479 MT_BUG_ON(mt
, entry
!= ptr
);
3480 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3481 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3482 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3484 /* prev:first value -> underflow */
3485 entry
= mas_prev(&mas
, 0x1000);
3486 MT_BUG_ON(mt
, entry
!= NULL
);
3487 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3488 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3489 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
3491 /* find:underflow -> first value */
3492 entry
= mas_find(&mas
, ULONG_MAX
);
3493 MT_BUG_ON(mt
, entry
!= ptr
);
3494 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3495 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3496 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3498 /* prev: pause ->active */
3499 mas_set(&mas
, 0x3600);
3500 entry
= mas_prev(&mas
, 0);
3501 MT_BUG_ON(mt
, entry
!= ptr3
);
3503 entry
= mas_prev(&mas
, 0);
3504 MT_BUG_ON(mt
, entry
!= ptr2
);
3505 MT_BUG_ON(mt
, mas
.index
!= 0x2000);
3506 MT_BUG_ON(mt
, mas
.last
!= 0x2500);
3507 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3509 /* prev:active -> underflow spanning min */
3510 entry
= mas_prev(&mas
, 0x1600);
3511 MT_BUG_ON(mt
, entry
!= NULL
);
3512 MT_BUG_ON(mt
, mas
.index
!= 0x1501);
3513 MT_BUG_ON(mt
, mas
.last
!= 0x1FFF);
3514 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
3516 /* prev: active ->active, continue */
3517 entry
= mas_prev(&mas
, 0);
3518 MT_BUG_ON(mt
, entry
!= ptr
);
3519 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3520 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3521 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3523 /* find: start ->active */
3525 entry
= mas_find(&mas
, ULONG_MAX
);
3526 MT_BUG_ON(mt
, entry
!= ptr
);
3527 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3528 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3529 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3531 /* find: pause ->active */
3534 entry
= mas_find(&mas
, ULONG_MAX
);
3535 MT_BUG_ON(mt
, entry
!= ptr
);
3536 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3537 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3538 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3540 /* find: start ->active on value */;
3541 mas_set(&mas
, 1200);
3542 entry
= mas_find(&mas
, ULONG_MAX
);
3543 MT_BUG_ON(mt
, entry
!= ptr
);
3544 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3545 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3546 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3548 /* find:active ->active */
3549 entry
= mas_find(&mas
, ULONG_MAX
);
3550 MT_BUG_ON(mt
, entry
!= ptr2
);
3551 MT_BUG_ON(mt
, mas
.index
!= 0x2000);
3552 MT_BUG_ON(mt
, mas
.last
!= 0x2500);
3553 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3556 /* find:active -> active (NULL)*/
3557 entry
= mas_find(&mas
, 0x2700);
3558 MT_BUG_ON(mt
, entry
!= NULL
);
3559 MT_BUG_ON(mt
, mas
.index
!= 0x2501);
3560 MT_BUG_ON(mt
, mas
.last
!= 0x2FFF);
3561 MAS_BUG_ON(&mas
, !mas_is_active(&mas
));
3563 /* find: overflow ->active */
3564 entry
= mas_find(&mas
, 0x5000);
3565 MT_BUG_ON(mt
, entry
!= ptr3
);
3566 MT_BUG_ON(mt
, mas
.index
!= 0x3000);
3567 MT_BUG_ON(mt
, mas
.last
!= 0x3500);
3568 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3570 /* find:active -> active (NULL) end*/
3571 entry
= mas_find(&mas
, ULONG_MAX
);
3572 MT_BUG_ON(mt
, entry
!= NULL
);
3573 MT_BUG_ON(mt
, mas
.index
!= 0x3501);
3574 MT_BUG_ON(mt
, mas
.last
!= ULONG_MAX
);
3575 MAS_BUG_ON(&mas
, !mas_is_active(&mas
));
3577 /* find_rev: active (END) ->active */
3578 entry
= mas_find_rev(&mas
, 0);
3579 MT_BUG_ON(mt
, entry
!= ptr3
);
3580 MT_BUG_ON(mt
, mas
.index
!= 0x3000);
3581 MT_BUG_ON(mt
, mas
.last
!= 0x3500);
3582 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3584 /* find_rev:active ->active */
3585 entry
= mas_find_rev(&mas
, 0);
3586 MT_BUG_ON(mt
, entry
!= ptr2
);
3587 MT_BUG_ON(mt
, mas
.index
!= 0x2000);
3588 MT_BUG_ON(mt
, mas
.last
!= 0x2500);
3589 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3591 /* find_rev: pause ->active */
3593 entry
= mas_find_rev(&mas
, 0);
3594 MT_BUG_ON(mt
, entry
!= ptr
);
3595 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3596 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3597 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3599 /* find_rev:active -> underflow */
3600 entry
= mas_find_rev(&mas
, 0);
3601 MT_BUG_ON(mt
, entry
!= NULL
);
3602 MT_BUG_ON(mt
, mas
.index
!= 0);
3603 MT_BUG_ON(mt
, mas
.last
!= 0x0FFF);
3604 MT_BUG_ON(mt
, !mas_is_underflow(&mas
));
3606 /* find_rev: start ->active */
3607 mas_set(&mas
, 0x1200);
3608 entry
= mas_find_rev(&mas
, 0);
3609 MT_BUG_ON(mt
, entry
!= ptr
);
3610 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3611 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3612 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3614 /* mas_walk start ->active */
3615 mas_set(&mas
, 0x1200);
3616 entry
= mas_walk(&mas
);
3617 MT_BUG_ON(mt
, entry
!= ptr
);
3618 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3619 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3620 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3622 /* mas_walk start ->active */
3623 mas_set(&mas
, 0x1600);
3624 entry
= mas_walk(&mas
);
3625 MT_BUG_ON(mt
, entry
!= NULL
);
3626 MT_BUG_ON(mt
, mas
.index
!= 0x1501);
3627 MT_BUG_ON(mt
, mas
.last
!= 0x1fff);
3628 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3630 /* mas_walk pause ->active */
3631 mas_set(&mas
, 0x1200);
3633 entry
= mas_walk(&mas
);
3634 MT_BUG_ON(mt
, entry
!= ptr
);
3635 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3636 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3637 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3639 /* mas_walk pause -> active */
3640 mas_set(&mas
, 0x1600);
3642 entry
= mas_walk(&mas
);
3643 MT_BUG_ON(mt
, entry
!= NULL
);
3644 MT_BUG_ON(mt
, mas
.index
!= 0x1501);
3645 MT_BUG_ON(mt
, mas
.last
!= 0x1fff);
3646 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3648 /* mas_walk none -> active */
3649 mas_set(&mas
, 0x1200);
3650 mas
.status
= ma_none
;
3651 entry
= mas_walk(&mas
);
3652 MT_BUG_ON(mt
, entry
!= ptr
);
3653 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3654 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3655 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3657 /* mas_walk none -> active */
3658 mas_set(&mas
, 0x1600);
3659 mas
.status
= ma_none
;
3660 entry
= mas_walk(&mas
);
3661 MT_BUG_ON(mt
, entry
!= NULL
);
3662 MT_BUG_ON(mt
, mas
.index
!= 0x1501);
3663 MT_BUG_ON(mt
, mas
.last
!= 0x1fff);
3664 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3666 /* mas_walk active -> active */
3670 entry
= mas_walk(&mas
);
3671 MT_BUG_ON(mt
, entry
!= ptr
);
3672 MT_BUG_ON(mt
, mas
.index
!= 0x1000);
3673 MT_BUG_ON(mt
, mas
.last
!= 0x1500);
3674 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3676 /* mas_walk active -> active */
3679 entry
= mas_walk(&mas
);
3680 MT_BUG_ON(mt
, entry
!= NULL
);
3681 MT_BUG_ON(mt
, mas
.index
!= 0x1501);
3682 MT_BUG_ON(mt
, mas
.last
!= 0x1fff);
3683 MT_BUG_ON(mt
, !mas_is_active(&mas
));
3688 static noinline
void __init
alloc_cyclic_testing(struct maple_tree
*mt
)
3690 unsigned long location
;
3693 MA_STATE(mas
, mt
, 0, 0);
3697 for (int i
= 0; i
< 100; i
++) {
3698 mas_alloc_cyclic(&mas
, &location
, mt
, 2, ULONG_MAX
, &next
, GFP_KERNEL
);
3699 MAS_BUG_ON(&mas
, i
!= location
- 2);
3700 MAS_BUG_ON(&mas
, mas
.index
!= location
);
3701 MAS_BUG_ON(&mas
, mas
.last
!= location
);
3702 MAS_BUG_ON(&mas
, i
!= next
- 3);
3708 mt_init_flags(mt
, MT_FLAGS_ALLOC_RANGE
);
3709 for (int i
= 0; i
< 100; i
++) {
3710 mtree_alloc_cyclic(mt
, &location
, mt
, 2, ULONG_MAX
, &next
, GFP_KERNEL
);
3711 MT_BUG_ON(mt
, i
!= location
- 2);
3712 MT_BUG_ON(mt
, i
!= next
- 3);
3713 MT_BUG_ON(mt
, mtree_load(mt
, location
) != mt
);
3718 next
= ULONG_MAX
- 1;
3719 ret
= mtree_alloc_cyclic(mt
, &location
, mt
, 2, ULONG_MAX
, &next
, GFP_KERNEL
);
3720 MT_BUG_ON(mt
, ret
!= 0);
3721 ret
= mtree_alloc_cyclic(mt
, &location
, mt
, 2, ULONG_MAX
, &next
, GFP_KERNEL
);
3722 MT_BUG_ON(mt
, ret
!= 0);
3723 ret
= mtree_alloc_cyclic(mt
, &location
, mt
, 2, ULONG_MAX
, &next
, GFP_KERNEL
);
3724 MT_BUG_ON(mt
, ret
!= 1);
/* The single tree instance shared by all tests below; each test (re)inits it. */
static DEFINE_MTREE(tree);
3728 static int __init
maple_tree_seed(void)
3730 unsigned long set
[] = { 5015, 5014, 5017, 25, 1000,
3731 1001, 1002, 1003, 1005, 0,
3735 pr_info("\nTEST STARTING\n\n");
3737 #if defined(BENCH_SLOT_STORE)
3739 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3740 bench_slot_store(&tree
);
3741 mtree_destroy(&tree
);
3744 #if defined(BENCH_NODE_STORE)
3746 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3747 bench_node_store(&tree
);
3748 mtree_destroy(&tree
);
3751 #if defined(BENCH_AWALK)
3753 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3755 mtree_destroy(&tree
);
3758 #if defined(BENCH_WALK)
3760 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3762 mtree_destroy(&tree
);
3765 #if defined(BENCH_LOAD)
3767 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3769 mtree_destroy(&tree
);
3772 #if defined(BENCH_FORK)
3777 #if defined(BENCH_MT_FOR_EACH)
3779 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3780 bench_mt_for_each(&tree
);
3781 mtree_destroy(&tree
);
3784 #if defined(BENCH_MAS_FOR_EACH)
3786 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3787 bench_mas_for_each(&tree
);
3788 mtree_destroy(&tree
);
3791 #if defined(BENCH_MAS_PREV)
3793 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3794 bench_mas_prev(&tree
);
3795 mtree_destroy(&tree
);
3799 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3800 check_store_null(&tree
);
3801 mtree_destroy(&tree
);
3803 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3804 check_root_expand(&tree
);
3805 mtree_destroy(&tree
);
3807 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3808 check_iteration(&tree
);
3809 mtree_destroy(&tree
);
3813 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3814 check_mas_store_gfp(&tree
);
3815 mtree_destroy(&tree
);
3817 /* Test ranges (store and insert) */
3818 mt_init_flags(&tree
, 0);
3819 check_ranges(&tree
);
3820 mtree_destroy(&tree
);
3822 #if defined(CONFIG_64BIT)
3823 /* These tests have ranges outside of 4GB */
3824 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3825 check_alloc_range(&tree
);
3826 mtree_destroy(&tree
);
3828 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3829 check_alloc_rev_range(&tree
);
3830 mtree_destroy(&tree
);
3833 mt_init_flags(&tree
, 0);
3835 check_load(&tree
, set
[0], NULL
); /* See if 5015 -> NULL */
3837 check_insert(&tree
, set
[9], &tree
); /* Insert 0 */
3838 check_load(&tree
, set
[9], &tree
); /* See if 0 -> &tree */
3839 check_load(&tree
, set
[0], NULL
); /* See if 5015 -> NULL */
3841 check_insert(&tree
, set
[10], ptr
); /* Insert 5003 */
3842 check_load(&tree
, set
[9], &tree
); /* See if 0 -> &tree */
3843 check_load(&tree
, set
[11], NULL
); /* See if 5002 -> NULL */
3844 check_load(&tree
, set
[10], ptr
); /* See if 5003 -> ptr */
3846 /* Clear out the tree */
3847 mtree_destroy(&tree
);
3849 /* Try to insert, insert a dup, and load back what was inserted. */
3850 mt_init_flags(&tree
, 0);
3851 check_insert(&tree
, set
[0], &tree
); /* Insert 5015 */
3852 check_dup_insert(&tree
, set
[0], &tree
); /* Insert 5015 again */
3853 check_load(&tree
, set
[0], &tree
); /* See if 5015 -> &tree */
3856 * Second set of tests try to load a value that doesn't exist, inserts
3857 * a second value, then loads the value again
3859 check_load(&tree
, set
[1], NULL
); /* See if 5014 -> NULL */
3860 check_insert(&tree
, set
[1], ptr
); /* insert 5014 -> ptr */
3861 check_load(&tree
, set
[1], ptr
); /* See if 5014 -> ptr */
3862 check_load(&tree
, set
[0], &tree
); /* See if 5015 -> &tree */
3864 * Tree currently contains:
3865 * p[0]: 14 -> (nil) p[1]: 15 -> ptr p[2]: 16 -> &tree p[3]: 0 -> (nil)
3867 check_insert(&tree
, set
[6], ptr
); /* insert 1002 -> ptr */
3868 check_insert(&tree
, set
[7], &tree
); /* insert 1003 -> &tree */
3870 check_load(&tree
, set
[0], &tree
); /* See if 5015 -> &tree */
3871 check_load(&tree
, set
[1], ptr
); /* See if 5014 -> ptr */
3872 check_load(&tree
, set
[6], ptr
); /* See if 1002 -> ptr */
3873 check_load(&tree
, set
[7], &tree
); /* 1003 = &tree ? */
3875 /* Clear out tree */
3876 mtree_destroy(&tree
);
3878 mt_init_flags(&tree
, 0);
3879 /* Test inserting into a NULL hole. */
3880 check_insert(&tree
, set
[5], ptr
); /* insert 1001 -> ptr */
3881 check_insert(&tree
, set
[7], &tree
); /* insert 1003 -> &tree */
3882 check_insert(&tree
, set
[6], ptr
); /* insert 1002 -> ptr */
3883 check_load(&tree
, set
[5], ptr
); /* See if 1001 -> ptr */
3884 check_load(&tree
, set
[6], ptr
); /* See if 1002 -> ptr */
3885 check_load(&tree
, set
[7], &tree
); /* See if 1003 -> &tree */
3887 /* Clear out the tree */
3888 mtree_destroy(&tree
);
3890 mt_init_flags(&tree
, 0);
3892 * set[] = {5015, 5014, 5017, 25, 1000,
3893 * 1001, 1002, 1003, 1005, 0,
3897 check_insert(&tree
, set
[0], ptr
); /* 5015 */
3898 check_insert(&tree
, set
[1], &tree
); /* 5014 */
3899 check_insert(&tree
, set
[2], ptr
); /* 5017 */
3900 check_insert(&tree
, set
[3], &tree
); /* 25 */
3901 check_load(&tree
, set
[0], ptr
);
3902 check_load(&tree
, set
[1], &tree
);
3903 check_load(&tree
, set
[2], ptr
);
3904 check_load(&tree
, set
[3], &tree
);
3905 check_insert(&tree
, set
[4], ptr
); /* 1000 < Should split. */
3906 check_load(&tree
, set
[0], ptr
);
3907 check_load(&tree
, set
[1], &tree
);
3908 check_load(&tree
, set
[2], ptr
);
3909 check_load(&tree
, set
[3], &tree
); /*25 */
3910 check_load(&tree
, set
[4], ptr
);
3911 check_insert(&tree
, set
[5], &tree
); /* 1001 */
3912 check_load(&tree
, set
[0], ptr
);
3913 check_load(&tree
, set
[1], &tree
);
3914 check_load(&tree
, set
[2], ptr
);
3915 check_load(&tree
, set
[3], &tree
);
3916 check_load(&tree
, set
[4], ptr
);
3917 check_load(&tree
, set
[5], &tree
);
3918 check_insert(&tree
, set
[6], ptr
);
3919 check_load(&tree
, set
[0], ptr
);
3920 check_load(&tree
, set
[1], &tree
);
3921 check_load(&tree
, set
[2], ptr
);
3922 check_load(&tree
, set
[3], &tree
);
3923 check_load(&tree
, set
[4], ptr
);
3924 check_load(&tree
, set
[5], &tree
);
3925 check_load(&tree
, set
[6], ptr
);
3926 check_insert(&tree
, set
[7], &tree
);
3927 check_load(&tree
, set
[0], ptr
);
3928 check_insert(&tree
, set
[8], ptr
);
3930 check_insert(&tree
, set
[9], &tree
);
3932 check_load(&tree
, set
[0], ptr
);
3933 check_load(&tree
, set
[1], &tree
);
3934 check_load(&tree
, set
[2], ptr
);
3935 check_load(&tree
, set
[3], &tree
);
3936 check_load(&tree
, set
[4], ptr
);
3937 check_load(&tree
, set
[5], &tree
);
3938 check_load(&tree
, set
[6], ptr
);
3939 check_load(&tree
, set
[9], &tree
);
3940 mtree_destroy(&tree
);
3942 mt_init_flags(&tree
, 0);
3943 check_seq(&tree
, 16, false);
3944 mtree_destroy(&tree
);
3946 mt_init_flags(&tree
, 0);
3947 check_seq(&tree
, 1000, true);
3948 mtree_destroy(&tree
);
3950 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3951 check_rev_seq(&tree
, 1000, true);
3952 mtree_destroy(&tree
);
3954 check_lower_bound_split(&tree
);
3955 check_upper_bound_split(&tree
);
3956 check_mid_split(&tree
);
3958 mt_init_flags(&tree
, 0);
3959 check_next_entry(&tree
);
3961 check_find_2(&tree
);
3962 mtree_destroy(&tree
);
3964 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3965 check_prev_entry(&tree
);
3966 mtree_destroy(&tree
);
3968 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3969 check_gap_combining(&tree
);
3970 mtree_destroy(&tree
);
3972 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3973 check_node_overwrite(&tree
);
3974 mtree_destroy(&tree
);
3976 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3977 next_prev_test(&tree
);
3978 mtree_destroy(&tree
);
3980 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3981 check_spanning_relatives(&tree
);
3982 mtree_destroy(&tree
);
3984 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3985 check_rev_find(&tree
);
3986 mtree_destroy(&tree
);
3988 mt_init_flags(&tree
, 0);
3989 check_fuzzer(&tree
);
3990 mtree_destroy(&tree
);
3992 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3994 mtree_destroy(&tree
);
3996 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
3997 check_bnode_min_spanning(&tree
);
3998 mtree_destroy(&tree
);
4000 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
4001 check_empty_area_window(&tree
);
4002 mtree_destroy(&tree
);
4004 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
4005 check_empty_area_fill(&tree
);
4006 mtree_destroy(&tree
);
4008 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
4009 check_state_handling(&tree
);
4010 mtree_destroy(&tree
);
4012 mt_init_flags(&tree
, MT_FLAGS_ALLOC_RANGE
);
4013 alloc_cyclic_testing(&tree
);
4014 mtree_destroy(&tree
);
4021 pr_info("maple_tree: %u of %u tests passed\n",
4022 atomic_read(&maple_tree_tests_passed
),
4023 atomic_read(&maple_tree_tests_run
));
4024 if (atomic_read(&maple_tree_tests_run
) ==
4025 atomic_read(&maple_tree_tests_passed
))
4031 static void __exit
maple_tree_harvest(void)
/* Register the test runner as module init and the no-op cleanup as exit. */
module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_DESCRIPTION("maple tree API test module");
MODULE_LICENSE("GPL");