/*
 * multiorder.c: Multi-order radix tree entry testing
 * Copyright (c) 2016 Intel Corporation
 * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
 * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
#include <linux/radix-tree.h>
#include <linux/slab.h>
#include <linux/errno.h>
#include <pthread.h>

#include "test.h"
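
/*
 * Walk every index covered by the order-'order' entry that starts at 'base',
 * i.e. base ... base + (1 << order) - 1.
 */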
#define for_each_index(i, base, order) \
	for (i = base; i < base + (1 << order); i++)
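
/*
 * Insert one multi-order entry, then check that a tag set anywhere inside
 * the entry is visible at every index the entry covers, and likewise for
 * clearing it.
 */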
static void __multiorder_tag_test(int index, int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int base, err, i;

	/* our canonical entry */
	base = index & ~((1 << order) - 1);

	printv(2, "Multiorder tag test with index %d, canonical entry %d\n",
			index, base);

	err = item_insert_order(&tree, index, order);
	assert(!err);

	/*
	 * Verify we get collisions for covered indices.  We try and fail to
	 * insert an exceptional entry so we don't leak memory via
	 * item_insert_order().
	 */
	for_each_index(i, base, order) {
		err = __radix_tree_insert(&tree, i, order,
				(void *)(0xA0 | RADIX_TREE_EXCEPTIONAL_ENTRY));
		assert(err == -EEXIST);
	}

	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_set(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
	assert(radix_tree_tag_clear(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_clear(&tree, index, 1));

	assert(!radix_tree_tagged(&tree, 0));
	assert(!radix_tree_tagged(&tree, 1));

	item_kill_tree(&tree);
}
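
/*
 * Check that tag_tagged_items() sees both a multi-order entry and an
 * ordinary single-index entry when both carry the same tag.
 */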
static void __multiorder_tag_test2(unsigned order, unsigned long index2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long index = (1 << order);
	index2 += index;

	assert(item_insert_order(&tree, 0, order) == 0);
	assert(item_insert(&tree, index2) == 0);

	assert(radix_tree_tag_set(&tree, 0, 0));
	assert(radix_tree_tag_set(&tree, index2, 0));

	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);

	item_kill_tree(&tree);
}

static void multiorder_tag_tests(void)
{
	int i, j;

	/* test multi-order entry for indices 0-7 with no sibling pointers */
	__multiorder_tag_test(0, 3);
	__multiorder_tag_test(5, 3);

	/* test multi-order entry for indices 8-15 with no sibling pointers */
	__multiorder_tag_test(8, 3);
	__multiorder_tag_test(15, 3);

	/*
	 * Our order 5 entry covers indices 0-31 in a tree with height=2.
	 * This is broken up as follows:
	 * 0-7:		canonical entry
	 * 8-15:	sibling 1
	 * 16-23:	sibling 2
	 * 24-31:	sibling 3
	 */
	__multiorder_tag_test(0, 5);
	__multiorder_tag_test(29, 5);

	/* same test, but with indices 32-63 */
	__multiorder_tag_test(32, 5);
	__multiorder_tag_test(44, 5);

	/*
	 * Our order 8 entry covers indices 0-255 in a tree with height=3.
	 * This is broken up as follows:
	 * 0-63:	canonical entry
	 * 64-127:	sibling 1
	 * 128-191:	sibling 2
	 * 192-255:	sibling 3
	 */
	__multiorder_tag_test(0, 8);
	__multiorder_tag_test(190, 8);

	/* same test, but with indices 256-511 */
	__multiorder_tag_test(256, 8);
	__multiorder_tag_test(300, 8);

	__multiorder_tag_test(0x12345678UL, 8);

	for (i = 1; i < 10; i++)
		for (j = 0; j < (10 << i); j++)
			__multiorder_tag_test2(i, j);
}
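
/*
 * Insert one multi-order entry and verify lookup, insertion collisions,
 * slot replacement and deletion across every index the entry covers.
 */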
static void multiorder_check(unsigned long index, int order)
{
	unsigned long i;
	unsigned long min = index & ~((1UL << order) - 1);
	unsigned long max = min + (1UL << order);
	void **slot;
	struct item *item2 = item_create(min, order);
	RADIX_TREE(tree, GFP_KERNEL);

	printv(2, "Multiorder index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, index, order) == 0);

	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == index);
	}
	for (i = 0; i < min; i++)
		item_check_absent(&tree, i);
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);
	for (i = min; i < max; i++)
		assert(radix_tree_insert(&tree, i, item2) == -EEXIST);

	slot = radix_tree_lookup_slot(&tree, index);
	free(*slot);
	radix_tree_replace_slot(&tree, slot, item2);
	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == min);
	}

	assert(item_delete(&tree, min) != 0);

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}
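
/*
 * Inserting an index beyond a multi-order entry grows the tree by one
 * level; deleting it again must shrink the tree back to the old root.
 */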
static void multiorder_shrink(unsigned long index, int order)
{
	unsigned long i;
	unsigned long max = 1 << order;
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;

	printv(2, "Multiorder shrink index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, 0, order) == 0);

	node = tree.rnode;

	assert(item_insert(&tree, index) == 0);
	assert(node != tree.rnode);

	assert(item_delete(&tree, index) != 0);
	assert(node == tree.rnode);

	for (i = 0; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == 0);
	}
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);

	if (!item_delete(&tree, 0)) {
		printv(2, "failed to delete index %ld (order %d)\n", index, order);
		abort();
	}

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}
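
/*
 * Regression test: insert a multi-order entry into a tree that already
 * holds a tagged single entry at index 0.
 */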
static void multiorder_insert_bug(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 0);
	radix_tree_tag_set(&tree, 0, 0);
	item_insert_order(&tree, 3 << 6, 6);

	item_kill_tree(&tree);
}
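
/*
 * Start an iteration from every index 0-255 and check that each slot the
 * iterator returns belongs to the next multi-order entry at or after the
 * starting index.
 */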
void multiorder_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j, err;

	printv(1, "Multiorder iteration test\n");

#define NUM_ENTRIES 11
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(&tree, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		radix_tree_for_each_slot(slot, &tree, &iter, j) {
			int height = order[i] / RADIX_TREE_MAP_SHIFT;
			int shift = height * RADIX_TREE_MAP_SHIFT;
			unsigned long mask = (1UL << order[i]) - 1;
			struct item *item = *slot;

			assert((iter.index | mask) == (index[i] | mask));
			assert(iter.shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(&tree);
}
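
/*
 * Same idea as multiorder_iteration(), but walking only tagged entries,
 * then entries retagged from tag 1 to tag 2 via tag_tagged_items().
 */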
void multiorder_tagged_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j;

	printv(1, "Multiorder tagged iteration test\n");

#define MT_NUM_ENTRIES 9
	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
	int order[MT_NUM_ENTRIES] = {1, 0, 2, 4, 3, 1, 3, 0, 7};

#define TAG_ENTRIES 7
	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};

	for (i = 0; i < MT_NUM_ENTRIES; i++)
		assert(!item_insert_order(&tree, index[i], order[i]));

	assert(!radix_tree_tagged(&tree, 1));

	for (i = 0; i < TAG_ENTRIES; i++)
		assert(radix_tree_tag_set(&tree, tag_index[i], 1));

	for (j = 0; j < 256; j++) {
		int k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
			unsigned long mask;
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
				TAG_ENTRIES);

	for (j = 0; j < 256; j++) {
		int mask, k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1 << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
			== TAG_ENTRIES);
	i = 0;
	radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
		assert(iter.index == tag_index[i]);
		i++;
	}

	item_kill_tree(&tree);
}

/*
 * Basic join checks: make sure we can't find an entry in the tree after
 * a larger entry has replaced it
 */
static void multiorder_join1(unsigned long index,
				unsigned order1, unsigned order2)
{
	unsigned long loc;
	void *item, *item2 = item_create(index + 1, order1);
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert_order(&tree, index, order2);
	item = radix_tree_lookup(&tree, index);
	radix_tree_join(&tree, index + 1, order1, item2);
	loc = find_item(&tree, item);
	if (loc == -1)
		free(item);
	item = radix_tree_lookup(&tree, index + 1);
	assert(item == item2);
	item_kill_tree(&tree);
}

/*
 * Check that the accounting of exceptional entries is handled correctly
 * by joining an exceptional entry to a normal pointer.
 */
static void multiorder_join2(unsigned order1, unsigned order2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void *item1 = item_create(0, order1);
	void *item2;

	item_insert_order(&tree, 0, order2);
	radix_tree_insert(&tree, 1 << order2, (void *)0x12UL);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == (void *)0x12UL);
	assert(node->exceptional == 1);

	item2 = radix_tree_lookup(&tree, 0);
	free(item2);
	radix_tree_join(&tree, 0, order1, item1);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == item1);
	assert(node->exceptional == 0);
	item_kill_tree(&tree);
}

/*
 * This test revealed an accounting bug for exceptional entries at one point.
 * Nodes were being freed back into the pool with an elevated exception count
 * by radix_tree_join() and then radix_tree_split() was failing to zero the
 * count of exceptional entries.
 */
static void multiorder_join3(unsigned int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;
	struct radix_tree_iter iter;
	unsigned long i;

	for (i = 0; i < (1 << order); i++) {
		radix_tree_insert(&tree, i, (void *)0x12UL);
	}

	radix_tree_join(&tree, 0, order, (void *)0x16UL);
	rcu_barrier();

	radix_tree_split(&tree, 0, 0);

	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x12UL);
	}

	__radix_tree_lookup(&tree, 0, &node, NULL);
	assert(node->exceptional == node->count);

	item_kill_tree(&tree);
}

static void multiorder_join(void)
{
	int i, j, idx;

	for (idx = 0; idx < 1024; idx = idx * 2 + 3) {
		for (i = 1; i < 15; i++) {
			for (j = 0; j < i; j++) {
				multiorder_join1(idx, i, j);
			}
		}
	}

	for (i = 1; i < 15; i++) {
		for (j = 0; j < i; j++) {
			multiorder_join2(i, j);
		}
	}

	for (i = 3; i < 10; i++) {
		multiorder_join3(i);
	}
}
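
/* Verify that a split consumed its preloads and allocated nothing extra. */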
static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
{
	struct radix_tree_preload *rtp = &radix_tree_preloads;
	if (rtp->nr != 0)
		printv(2, "split(%u %u) remaining %u\n", old_order, new_order,
							rtp->nr);
	/*
	 * Can't check for equality here as some nodes may have been
	 * RCU-freed while we ran.  But we should never finish with more
	 * nodes allocated since they should have all been preloaded.
	 */
	if (nr_allocated > alloc)
		printv(2, "split(%u %u) allocated %u %u\n", old_order, new_order,
							alloc, nr_allocated);
}
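
/*
 * Split one order-'old_order' entry into order-'new_order' entries and
 * replace each resulting slot with a fresh item, checking the memory
 * accounting as we go.
 */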
static void __multiorder_split(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_ATOMIC);
	void **slot;
	struct radix_tree_iter iter;
	unsigned alloc;
	struct item *item;

	radix_tree_preload(GFP_KERNEL);
	assert(item_insert_order(&tree, 0, old_order) == 0);
	radix_tree_preload_end();

	/* Wipe out the preloaded cache or it'll confuse check_mem() */
	radix_tree_cpu_dead(0);

	item = radix_tree_tag_set(&tree, 0, 2);

	radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
	alloc = nr_allocated;
	radix_tree_split(&tree, 0, new_order);
	check_mem(old_order, new_order, alloc);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}
	radix_tree_preload_end();

	item_kill_tree(&tree);
	free(item);
}
static void __multiorder_split2(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item != (void *)0x12);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}
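
/*
 * Split an exceptional entry and replace the pieces first with another
 * exceptional value, then with NULL in all but one slot, checking the
 * exceptional accounting after each step.
 */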
static void __multiorder_split3(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->exceptional > 0);

	item_kill_tree(&tree);

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		if (iter.index == (1 << new_order))
			radix_tree_iter_replace(&tree, &iter, slot,
						(void *)0x16);
		else
			radix_tree_iter_replace(&tree, &iter, slot, NULL);
	}

	item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->count == node->exceptional);
	do {
		node = node->parent;
		if (!node)
			break;
		assert(node->count == 1);
		assert(node->exceptional == 0);
	} while (1);

	item_kill_tree(&tree);
}

static void multiorder_split(void)
{
	int i, j;

	for (i = 3; i < 11; i++)
		for (j = 0; j < i; j++) {
			__multiorder_split(i, j);
			__multiorder_split2(i, j);
			__multiorder_split3(i, j);
		}
}
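
/*
 * Check that node->count and node->exceptional stay consistent as an
 * exceptional entry is inserted, deleted and replaced alongside a
 * multi-order item entry.
 */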
static void multiorder_account(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;

	item_insert_order(&tree, 0, 5);

	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 0, &node, NULL);
	assert(node->count == node->exceptional * 2);
	radix_tree_delete(&tree, 1 << 5);
	assert(node->exceptional == 0);

	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 1 << 5, &node, &slot);
	assert(node->count == node->exceptional * 2);
	__radix_tree_replace(&tree, node, slot, NULL, NULL);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}
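
/*
 * Race a creator thread, which repeatedly inserts and RCU-deletes a
 * multi-order entry at index 0, against iterator threads walking the tree.
 */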
bool stop_iteration = false;

static void *creator_func(void *ptr)
{
	/* 'order' is set up to ensure we have sibling entries */
	unsigned int order = RADIX_TREE_MAP_SHIFT - 1;
	struct radix_tree_root *tree = ptr;
	int i;

	for (i = 0; i < 10000; i++) {
		item_insert_order(tree, 0, order);
		item_delete_rcu(tree, 0);
	}

	stop_iteration = true;
	return NULL;
}

static void *iterator_func(void *ptr)
{
	struct radix_tree_root *tree = ptr;
	struct radix_tree_iter iter;
	struct item *item;
	void **slot;

	while (!stop_iteration) {
		rcu_read_lock();
		radix_tree_for_each_slot(slot, tree, &iter, 0) {
			item = radix_tree_deref_slot(slot);

			if (!item)
				continue;
			if (radix_tree_deref_retry(item)) {
				slot = radix_tree_iter_retry(&iter);
				continue;
			}

			item_sanity(item, iter.index);
		}
		rcu_read_unlock();
	}

	return NULL;
}

static void multiorder_iteration_race(void)
{
	const int num_threads = sysconf(_SC_NPROCESSORS_ONLN);
	pthread_t worker_thread[num_threads];
	RADIX_TREE(tree, GFP_KERNEL);
	int i;

	pthread_create(&worker_thread[0], NULL, &creator_func, &tree);
	for (i = 1; i < num_threads; i++)
		pthread_create(&worker_thread[i], NULL, &iterator_func, &tree);

	for (i = 0; i < num_threads; i++)
		pthread_join(worker_thread[i], NULL);

	item_kill_tree(&tree);
}

void multiorder_checks(void)
{
	int i;

	for (i = 0; i < 20; i++) {
		multiorder_check(200, i);
		multiorder_check(0, i);
		multiorder_check((1UL << i) + 1, i);
	}

	for (i = 0; i < 15; i++)
		multiorder_shrink((1UL << (i + RADIX_TREE_MAP_SHIFT)), i);

	multiorder_insert_bug();
	multiorder_tag_tests();
	multiorder_iteration();
	multiorder_tagged_iteration();
	multiorder_join();
	multiorder_split();
	multiorder_account();
	multiorder_iteration_race();

	radix_tree_cpu_dead(0);
}

int __weak main(void)
{
	radix_tree_init();
	multiorder_checks();
	return 0;
}