// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <[email protected]>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>
#include <linux/rwsem.h>

#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
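/*
 * 32-bit builds pack more (smaller) slots into the fixed-size node, so a
 * slot count above 31 doubles as a word-size check here.
 */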

#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)	do {} while (0)
#define mt_validate(mt)		do {} while (0)
#define mt_cache_shrink()	do {} while (0)
#define mas_dump(mas)		do {} while (0)
#define mas_wr_dump(mas)	do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
			__func__, __LINE__, __x);			\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif
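/*
 * Without CONFIG_DEBUG_MAPLE_TREE the dump/validate hooks compile away and
 * MT_BUG_ON() degrades to counting failures rather than stopping the run.
 */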

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_LOAD */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
/* #define BENCH_MAS_FOR_EACH */
/* #define BENCH_MAS_PREV */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif
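/*
 * mt_set_non_kernel() and the allocation counters are userspace test-harness
 * hooks; a kernel build stubs them out, while the userspace build stubs
 * cond_resched() instead.
 */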

#define mas_is_none(x)		((x)->status == ma_none)
#define mas_is_overflow(x)	((x)->status == ma_overflow)
#define mas_is_underflow(x)	((x)->status == ma_underflow)
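/* Open-coded checks on mas->status, matching the internal predicates. */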

static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}
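/*
 * The thin wrappers above pin GFP_KERNEL so the checks below stay
 * one-liners; mtree_insert_index() stores an index as its own
 * xa_mk_value() so a later load can verify it.
 */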

#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif
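/*
 * Both helpers seed "result" with a deliberately wrong value so that a
 * passing check proves the allocator actually wrote the output parameter.
 */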

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
		unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
				      unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}

static noinline void __init check_index_load(struct maple_tree *mt,
				     unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

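/*
 * check_rev_seq() - Insert max..1 in descending order, re-checking every
 * earlier insertion after each step and toggling RCU mode to ensure the
 * tree height survives.
 */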
static noinline void __init check_rev_seq(struct maple_tree *mt,
		unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024,
			mt_nr_allocated(), mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;

	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		/* Check every insertion so far, mirroring the lower bound test. */
		for (j = 4096; j <= i; j *= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

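/*
 * check_rev_find() - Walk backwards with mas_find_rev() across 201 stored
 * ranges, checking both hits and limit-bounded misses.
 */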
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

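/*
 * check_find() - Populate power-of-four indices plus a zero entry at 64,
 * then exercise mas_find(), mas_for_each(), mas_pause(), mt_find() and
 * last-entry lookup via mas_prev().
 */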
static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT)
		count = 15;
	else
		count = 20;

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);

	/* Range check. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find the last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete the end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 *    prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);

	mas.status = ma_none;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

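/*
 * check_find_2() - Verify mas_for_each() sees exactly the live entries while
 * singletons are inserted one by one and then erased one by one.
 */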
static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}

#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}' |
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
		/* Inclusive , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *       end of hole is EXCLUSIVE
		 *       (opposite of the above table.)
		 * Start of hole, end of hole, size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};
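
	/*
	 * Addresses are stored and searched as page frame numbers: ranges are
	 * shifted right by 12 on insert, and the "<< 12" literals below undo
	 * that for readability.
	 */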

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000,	/* Min */
		0x7fff58791000,	/* Max */
		0x1000,		/* Size */
		0x7fff5878d << 12,	/* First rev hole of size 0x1000 */
		0,		/* Return value success. */

		0x0,		/* Min */
		0x565234AF0 << 12,	/* Max */
		0x3000,		/* Size */
		0x565234AEE << 12,	/* max - 3. */
		0,		/* Return value success. */

		0x0,		/* Min */
		-1,		/* Max */
		0x1000,		/* Size */
		562949953421311 << 12,	/* First rev hole of size 0x1000 */
		0,		/* Return value success. */

		0x0,		/* Min */
		0x7F36D5109 << 12,	/* Max */
		0x4000,		/* Size */
		0x7F36D5106 << 12,	/* First rev hole of size 0x4000 */
		0,		/* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
			 min, holes[i+1]>>12, holes[i+2]>>12,
			 holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
			 (holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
			 i, req_range[i] >> 12,
			 (req_range[i + 1] >> 12),
			 req_range[i+2] >> 12,
			 req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],	      /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

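/*
 * check_alloc_range() - Forward-search counterpart of the test above: the
 * same address layout probed with mas_empty_area() and mtree_alloc_range().
 */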
static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}' |
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
		/* Inclusive , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole, size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000,	/* Min */
		0x7fff58791000,	/* Max */
		0x1000,		/* Size */
		0x565234afb000,	/* First hole in our data of size 0x1000. */
		0,		/* Return value success. */

		0x0,		/* Min */
		0x7fff58791000,	/* Max */
		0x1F00,		/* Size */
		0x0,		/* First hole in our data of size 0x2000. */
		0,		/* Return value success. */

		/* Test ascend. */
		34148797436 << 12,	/* Min */
		0x7fff587AF000,		/* Max */
		0x3000,			/* Size */
		34148798629 << 12,	/* Expected location */
		0,			/* Return value success. */

		/* Test failing. */
		34148798623 << 12,	/* Min */
		34148798683 << 12,	/* Max */
		0x15000,		/* Size */
		0,			/* Expected location */
		-EBUSY,			/* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12,	/* Min */
		0x7fff587AF000,		/* Max */
		0x10000,		/* Size */
		34148798632 << 12,	/* Expected location */
		0,			/* Return value success. */

		/* Test walking off the end of root. */
		0,			/* Min */
		-1,			/* Max */
		-1,			/* Size */
		0,			/* Expected location */
		-EBUSY,			/* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,			/* Min */
		-1,			/* Max */
		4503599618982063UL << 12,	/* Size */
		34359052178 << 12,	/* Expected location */
		-EBUSY,			/* Return failure. */

		/* Test a single entry */
		34148798648 << 12,	/* Min */
		34148798648 << 12,	/* Max */
		4096,			/* Size of 1 */
		34148798648 << 12,	/* Location is the same as min/max */
		0,			/* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			 holes[i+1] >> 12, holes[i+2] >> 12,
			 min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
			 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],	      /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
#endif

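/*
 * check_ranges() - Range store/overwrite stress: overlapping inserts,
 * multi-level overwrites, in-place appends, rebalance-induced gaps and
 * forced three-way splits.
 */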
static noinline void __init check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	static const unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create a tree of 0-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create a tree of 0-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create a tree of 0-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 *    next node.
	 * 2. Split a single range.
	 * 3. Overwrite the start of a range.
	 * 4. Overwrite the end of a range.
	 * 5. Overwrite the entire range.
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 *    combined.
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 *    the root to be combined.
	 * 8. Overwrite the whole tree.
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot.
	 */

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Check in-place modifications */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	/* Append to the start of the last range */
	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i * 5 + 1;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the last range without touching any boundaries */
	for (i = 0; i < 10; i++) {
		val = val2 + 5;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the end of the last range */
	val = val2;
	for (i = 0; i < 10; i++) {
		val += 5;
		MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
						     xa_mk_value(val)) != 0);
	}

	/* Overwriting the range and over a part of the next range */
	for (i = 10; i < 30; i += 2) {
		val = i * 5 + 1;
		val2 = val + 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Overwriting a part of the range and over the next range */
	for (i = 50; i < 70; i += 2) {
		val2 = i * 5;
		val = val2 - 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges
	 */
	for (i = 100; i < 130; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges, in RCU mode
	 */
	mt_set_in_rcu(mt);
	for (i = 150; i < 180; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	MT_BUG_ON(mt, !mt_height(mt));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

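/*
 * check_next_entry() - Walk to the first entry, then step forward with
 * mas_next() until the limit is passed and NULL is returned.
 */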
static noinline void __init check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

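/*
 * check_prev_entry() - mas_prev() must honour its lower limit: when the
 * previous entry lies entirely below the limit, NULL is returned.
 */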
static noinline void __init check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}

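/*
 * check_store_null() - Storing NULL must collapse the tree back to empty in
 * every case below except a partial overwrite of a single root entry.
 */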
static noinline void __init check_store_null(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, ULONG_MAX);

	/*
	 * Storing NULL over the range [0, ULONG_MAX] in an empty tree should
	 * leave the tree empty.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Storing NULL over any range of an empty tree should leave the tree
	 * empty.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 3, 10);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Storing NULL over the range [0, ULONG_MAX] in a single-entry tree
	 * should result in an empty tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Storing NULL over a range [0, n] in a single-entry tree should
	 * result in an empty tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Storing NULL over a range [m, n] where m > 0 in a single-entry tree
	 * should leave it a single-entry tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 2, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, mtree_empty(mt));
	// MT_BUG_ON(mt, xa_is_node(mas_root(&mas)));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Storing NULL over the range [0, ULONG_MAX] in a tree with an
	 * allocated node should result in an empty tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 1, 3);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	// MT_BUG_ON(mt, !xa_is_node(mas_root(&mas)));
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);
}

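/*
 * check_root_expand() - Exercise root-only trees: walking an empty tree
 * reports the full [0, ULONG_MAX] range as NULL, and stores at low indices
 * expand the root entry in place.
 */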
static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;

	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));

	mas_unlock(&mas);
}

static noinline void __init check_deficient_node(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	int count;

	mas_lock(&mas);
	for (count = 0; count < 10; count++) {
		mas_set(&mas, count);
		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
	}

	for (count = 20; count < 39; count++) {
		mas_set(&mas, count);
		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
	}

	for (count = 10; count < 12; count++) {
		mas_set(&mas, count);
		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
	}
	mas_unlock(&mas);
	mt_validate(mt);
}

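/*
 * check_gap_combining() - Erase neighbouring entries so adjacent gaps must
 * be combined, then locate the merged gap with mas_empty_area_rev().
 */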
static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3]
	 * and seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					 seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6]. Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					 seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					 seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					 seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					 seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}

static noinline void __init check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/*mt_dump(mt, mt_dump_dec); */
	mt_validate(mt);
}

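/*
 * The BENCH_* functions below are not correctness tests: each one loops a
 * single operation millions of times so relative timings can be compared
 * across changes. Enable them via the commented defines near the top.
 */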
1799 #if defined(BENCH_SLOT_STORE)
bench_slot_store(struct maple_tree * mt)1800 static noinline void __init bench_slot_store(struct maple_tree *mt)
1801 {
1802 int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1803
1804 for (i = 0; i < max; i += 10)
1805 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1806
1807 for (i = 0; i < count; i++) {
1808 mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1809 mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1810 GFP_KERNEL);
1811 }
1812 }
1813 #endif
1814
1815 #if defined(BENCH_NODE_STORE)
bench_node_store(struct maple_tree * mt)1816 static noinline void __init bench_node_store(struct maple_tree *mt)
1817 {
1818 int i, overwrite = 76, max = 240, count = 20000000;
1819
1820 for (i = 0; i < max; i += 10)
1821 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1822
1823 for (i = 0; i < count; i++) {
1824 mtree_store_range(mt, overwrite, overwrite + 15,
1825 xa_mk_value(overwrite), GFP_KERNEL);
1826
1827 overwrite += 5;
1828 if (overwrite >= 135)
1829 overwrite = 76;
1830 }
1831 }
1832 #endif
1833
1834 #if defined(BENCH_AWALK)
bench_awalk(struct maple_tree * mt)1835 static noinline void __init bench_awalk(struct maple_tree *mt)
1836 {
1837 int i, max = 2500, count = 50000000;
1838 MA_STATE(mas, mt, 1470, 1470);
1839
1840 for (i = 0; i < max; i += 10)
1841 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1842
1843 mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1844
1845 for (i = 0; i < count; i++) {
1846 mas_empty_area_rev(&mas, 0, 2000, 10);
1847 mas_reset(&mas);
1848 }
1849 }
1850 #endif
1851 #if defined(BENCH_WALK)
bench_walk(struct maple_tree * mt)1852 static noinline void __init bench_walk(struct maple_tree *mt)
1853 {
1854 int i, max = 2500, count = 550000000;
1855 MA_STATE(mas, mt, 1470, 1470);
1856
1857 for (i = 0; i < max; i += 10)
1858 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1859
1860 for (i = 0; i < count; i++) {
1861 mas_walk(&mas);
1862 mas_reset(&mas);
1863 }
1864
1865 }
1866 #endif
1867
1868 #if defined(BENCH_LOAD)
bench_load(struct maple_tree * mt)1869 static noinline void __init bench_load(struct maple_tree *mt)
1870 {
1871 int i, max = 2500, count = 550000000;
1872
1873 for (i = 0; i < max; i += 10)
1874 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1875
1876 for (i = 0; i < count; i++)
1877 mtree_load(mt, 1470);
1878 }
1879 #endif
1880
1881 #if defined(BENCH_MT_FOR_EACH)
bench_mt_for_each(struct maple_tree * mt)1882 static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1883 {
1884 int i, count = 1000000;
1885 unsigned long max = 2500, index = 0;
1886 void *entry;
1887
1888 for (i = 0; i < max; i += 5)
1889 mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1890
1891 for (i = 0; i < count; i++) {
1892 unsigned long j = 0;
1893
1894 mt_for_each(mt, entry, index, max) {
1895 MT_BUG_ON(mt, entry != xa_mk_value(j));
1896 j += 5;
1897 }
1898
1899 index = 0;
1900 }
1901
1902 }
1903 #endif
1904
1905 #if defined(BENCH_MAS_FOR_EACH)
bench_mas_for_each(struct maple_tree * mt)1906 static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1907 {
1908 int i, count = 1000000;
1909 unsigned long max = 2500;
1910 void *entry;
1911 MA_STATE(mas, mt, 0, 0);
1912
1913 for (i = 0; i < max; i += 5) {
1914 int gap = 4;
1915
1916 if (i % 30 == 0)
1917 gap = 3;
1918 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1919 }
1920
1921 rcu_read_lock();
1922 for (i = 0; i < count; i++) {
1923 unsigned long j = 0;
1924
1925 mas_for_each(&mas, entry, max) {
1926 MT_BUG_ON(mt, entry != xa_mk_value(j));
1927 j += 5;
1928 }
1929 mas_set(&mas, 0);
1930 }
1931 rcu_read_unlock();
1932
1933 }
1934 #endif
1935 #if defined(BENCH_MAS_PREV)
bench_mas_prev(struct maple_tree * mt)1936 static noinline void __init bench_mas_prev(struct maple_tree *mt)
1937 {
1938 int i, count = 1000000;
1939 unsigned long max = 2500;
1940 void *entry;
1941 MA_STATE(mas, mt, 0, 0);
1942
1943 for (i = 0; i < max; i += 5) {
1944 int gap = 4;
1945
1946 if (i % 30 == 0)
1947 gap = 3;
1948 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1949 }
1950
1951 rcu_read_lock();
1952 for (i = 0; i < count; i++) {
1953 unsigned long j = 2495;
1954
1955 mas_set(&mas, ULONG_MAX);
1956 while ((entry = mas_prev(&mas, 0)) != NULL) {
1957 MT_BUG_ON(mt, entry != xa_mk_value(j));
1958 j -= 5;
1959 }
1960 }
1961 rcu_read_unlock();
1962
1963 }
1964 #endif
1965 /* check_forking - simulate the kernel forking sequence with the tree. */
1966 static noinline void __init check_forking(void)
1967 {
1968 struct maple_tree mt, newmt;
1969 int i, nr_entries = 134, ret;
1970 void *val;
1971 MA_STATE(mas, &mt, 0, 0);
1972 MA_STATE(newmas, &newmt, 0, 0);
1973 struct rw_semaphore mt_lock, newmt_lock;
1974
1975 init_rwsem(&mt_lock);
1976 init_rwsem(&newmt_lock);
1977
1978 mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1979 mt_set_external_lock(&mt, &mt_lock);
1980
1981 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1982 mt_set_external_lock(&newmt, &newmt_lock);
1983
1984 down_write(&mt_lock);
1985 for (i = 0; i <= nr_entries; i++) {
1986 mas_set_range(&mas, i*10, i*10 + 5);
1987 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1988 }
1989
1990 down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
1991 ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
1992 if (ret) {
1993 pr_err("OOM!");
1994 BUG_ON(1);
1995 }
1996
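/*
 * __mt_dup() copies the nodes but shares the entries with the old
 * tree; mimic the fork path by walking the duplicate and writing each
 * entry back in place (a real fork would store the new VMA copies
 * here).
 */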
1997 mas_set(&newmas, 0);
1998 mas_for_each(&newmas, val, ULONG_MAX)
1999 mas_store(&newmas, val);
2000
2001 mas_destroy(&newmas);
2002 mas_destroy(&mas);
2003 mt_validate(&newmt);
2004 __mt_destroy(&newmt);
2005 __mt_destroy(&mt);
2006 up_write(&newmt_lock);
2007 up_write(&mt_lock);
2008 }
2009
2010 static noinline void __init check_iteration(struct maple_tree *mt)
2011 {
2012 int i, nr_entries = 125;
2013 void *val;
2014 MA_STATE(mas, mt, 0, 0);
2015
2016 for (i = 0; i <= nr_entries; i++)
2017 mtree_store_range(mt, i * 10, i * 10 + 9,
2018 xa_mk_value(i), GFP_KERNEL);
2019
2020 mt_set_non_kernel(99999);
2021
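/*
 * Each entry spans [i * 10, i * 10 + 9].  The loops below overwrite
 * part of an entry mid-iteration and verify that the iterator resumes
 * at the correct next range.
 */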
2022 i = 0;
2023 mas_lock(&mas);
2024 mas_for_each(&mas, val, 925) {
2025 MT_BUG_ON(mt, mas.index != i * 10);
2026 MT_BUG_ON(mt, mas.last != i * 10 + 9);
2027 /* Overwrite end of entry 92 */
2028 if (i == 92) {
2029 mas.index = 925;
2030 mas.last = 929;
2031 mas_store(&mas, val);
2032 }
2033 i++;
2034 }
2035 /* Ensure mas_find() gets the next value */
2036 val = mas_find(&mas, ULONG_MAX);
2037 MT_BUG_ON(mt, val != xa_mk_value(i));
2038
2039 mas_set(&mas, 0);
2040 i = 0;
2041 mas_for_each(&mas, val, 785) {
2042 MT_BUG_ON(mt, mas.index != i * 10);
2043 MT_BUG_ON(mt, mas.last != i * 10 + 9);
2044 /* Overwrite start of entry 78 */
2045 if (i == 78) {
2046 mas.index = 780;
2047 mas.last = 785;
2048 mas_store(&mas, val);
2049 } else {
2050 i++;
2051 }
2052 }
2053 val = mas_find(&mas, ULONG_MAX);
2054 MT_BUG_ON(mt, val != xa_mk_value(i));
2055
2056 mas_set(&mas, 0);
2057 i = 0;
2058 mas_for_each(&mas, val, 765) {
2059 MT_BUG_ON(mt, mas.index != i * 10);
2060 MT_BUG_ON(mt, mas.last != i * 10 + 9);
2061 /* Overwrite end of entry 76 and advance to the end */
2062 if (i == 76) {
2063 mas.index = 760;
2064 mas.last = 765;
2065 mas_store(&mas, val);
2066 }
2067 i++;
2068 }
2069 /* Make sure the next find returns the one after 765, 766-769 */
2070 val = mas_find(&mas, ULONG_MAX);
2071 MT_BUG_ON(mt, val != xa_mk_value(76));
2072 mas_unlock(&mas);
2073 mas_destroy(&mas);
2074 mt_set_non_kernel(0);
2075 }
2076
2077 static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
2078 {
2080 struct maple_tree newmt;
2081 int i, nr_entries = 135;
2082 void *val;
2083 MA_STATE(mas, mt, 0, 0);
2084 MA_STATE(newmas, mt, 0, 0);
2085
2086 for (i = 0; i <= nr_entries; i++)
2087 mtree_store_range(mt, i*10, i*10 + 5,
2088 xa_mk_value(i), GFP_KERNEL);
2089
2090 mt_set_non_kernel(99999);
2091 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2092 newmas.tree = &newmt;
2093 rcu_read_lock();
2094 mas_lock(&newmas);
2095 mas_reset(&newmas);
2096 mas_set(&mas, 0);
2097 mas_for_each(&mas, val, ULONG_MAX) {
2098 newmas.index = mas.index;
2099 newmas.last = mas.last;
2100 mas_store_gfp(&newmas, val, GFP_KERNEL);
2101 }
2102 mas_unlock(&newmas);
2103 rcu_read_unlock();
2104 mt_validate(&newmt);
2105 mt_set_non_kernel(0);
2106 mtree_destroy(&newmt);
2107 }
2108
2109 #if defined(BENCH_FORK)
2110 static noinline void __init bench_forking(void)
2111 {
2112 struct maple_tree mt, newmt;
2113 int i, nr_entries = 134, nr_fork = 80000, ret;
2114 void *val;
2115 MA_STATE(mas, &mt, 0, 0);
2116 MA_STATE(newmas, &newmt, 0, 0);
2117 struct rw_semaphore mt_lock, newmt_lock;
2118
2119 init_rwsem(&mt_lock);
2120 init_rwsem(&newmt_lock);
2121
2122 mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2123 mt_set_external_lock(&mt, &mt_lock);
2124
2125 down_write(&mt_lock);
2126 for (i = 0; i <= nr_entries; i++) {
2127 mas_set_range(&mas, i*10, i*10 + 5);
2128 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
2129 }
2130
2131 for (i = 0; i < nr_fork; i++) {
2132 mt_init_flags(&newmt,
2133 MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2134 mt_set_external_lock(&newmt, &newmt_lock);
2135
2136 down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
2137 ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
2138 if (ret) {
2139 pr_err("OOM!");
2140 BUG_ON(1);
2141 }
2142
2143 mas_set(&newmas, 0);
2144 mas_for_each(&newmas, val, ULONG_MAX)
2145 mas_store(&newmas, val);
2146
2147 mas_destroy(&newmas);
2148 mt_validate(&newmt);
2149 __mt_destroy(&newmt);
2150 up_write(&newmt_lock);
2151 }
2152 mas_destroy(&mas);
2153 __mt_destroy(&mt);
2154 up_write(&mt_lock);
2155 }
2156 #endif
2157
2158 static noinline void __init next_prev_test(struct maple_tree *mt)
2159 {
2160 int i, nr_entries;
2161 void *val;
2162 MA_STATE(mas, mt, 0, 0);
2163 struct maple_enode *mn;
2164 static const unsigned long *level2;
2165 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2166 725};
2167 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2168 1760, 1765};
2169 unsigned long last_index;
2170
2171 if (MAPLE_32BIT) {
2172 nr_entries = 500;
2173 level2 = level2_32;
2174 last_index = 0x138e;
2175 } else {
2176 nr_entries = 200;
2177 level2 = level2_64;
2178 last_index = 0x7d6;
2179 }
2180
2181 for (i = 0; i <= nr_entries; i++)
2182 mtree_store_range(mt, i*10, i*10 + 5,
2183 xa_mk_value(i), GFP_KERNEL);
2184
2185 mas_lock(&mas);
2186 for (i = 0; i <= nr_entries / 2; i++) {
2187 mas_next(&mas, 1000);
2188 if (mas_is_none(&mas))
2189 break;
2190
2191 }
2192 mas_reset(&mas);
2193 mas_set(&mas, 0);
2194 i = 0;
2195 mas_for_each(&mas, val, 1000) {
2196 i++;
2197 }
2198
2199 mas_reset(&mas);
2200 mas_set(&mas, 0);
2201 i = 0;
2202 mas_for_each(&mas, val, 1000) {
2203 mas_pause(&mas);
2204 i++;
2205 }
2206
2207 /*
2208 * 680 - 685 = xa_mk_value(68)
2209 * 686 - 689 = NULL
2210 * 690 - 695 = xa_mk_value(69)
2211 * Check simple next/prev
2212 */
2213 mas_set(&mas, 686);
2214 val = mas_walk(&mas);
2215 MT_BUG_ON(mt, val != NULL);
2216
2217 val = mas_next(&mas, 1000);
2218 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2219 MT_BUG_ON(mt, mas.index != 690);
2220 MT_BUG_ON(mt, mas.last != 695);
2221
2222 val = mas_prev(&mas, 0);
2223 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2224 MT_BUG_ON(mt, mas.index != 680);
2225 MT_BUG_ON(mt, mas.last != 685);
2226
2227 val = mas_next(&mas, 1000);
2228 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2229 MT_BUG_ON(mt, mas.index != 690);
2230 MT_BUG_ON(mt, mas.last != 695);
2231
2232 val = mas_next(&mas, 1000);
2233 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2234 MT_BUG_ON(mt, mas.index != 700);
2235 MT_BUG_ON(mt, mas.last != 705);
2236
2237 /* Check across node boundaries of the tree */
2238 mas_set(&mas, 70);
2239 val = mas_walk(&mas);
2240 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2241 MT_BUG_ON(mt, mas.index != 70);
2242 MT_BUG_ON(mt, mas.last != 75);
2243
2244 val = mas_next(&mas, 1000);
2245 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2246 MT_BUG_ON(mt, mas.index != 80);
2247 MT_BUG_ON(mt, mas.last != 85);
2248
2249 val = mas_prev(&mas, 70);
2250 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2251 MT_BUG_ON(mt, mas.index != 70);
2252 MT_BUG_ON(mt, mas.last != 75);
2253
2254 /* Check across two levels of the tree */
2255 mas_reset(&mas);
2256 mas_set(&mas, level2[0]);
2257 val = mas_walk(&mas);
2258 MT_BUG_ON(mt, val != NULL);
2259 val = mas_next(&mas, level2[1]);
2260 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2261 MT_BUG_ON(mt, mas.index != level2[2]);
2262 MT_BUG_ON(mt, mas.last != level2[3]);
2263 mn = mas.node;
2264
2265 val = mas_next(&mas, level2[1]);
2266 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2267 MT_BUG_ON(mt, mas.index != level2[4]);
2268 MT_BUG_ON(mt, mas.last != level2[5]);
2269 MT_BUG_ON(mt, mn == mas.node);
2270
2271 val = mas_prev(&mas, 0);
2272 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2273 MT_BUG_ON(mt, mas.index != level2[2]);
2274 MT_BUG_ON(mt, mas.last != level2[3]);
2275
2276 /* Check running off the end and back on */
2277 mas_set(&mas, nr_entries * 10);
2278 val = mas_walk(&mas);
2279 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2280 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2281 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2282
2283 val = mas_next(&mas, ULONG_MAX);
2284 MT_BUG_ON(mt, val != NULL);
2285 MT_BUG_ON(mt, mas.index != last_index);
2286 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2287
2288 val = mas_prev(&mas, 0);
2289 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2290 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2291 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2292
2293 /* Check running off the start and back on */
2294 mas_reset(&mas);
2295 mas_set(&mas, 10);
2296 val = mas_walk(&mas);
2297 MT_BUG_ON(mt, val != xa_mk_value(1));
2298 MT_BUG_ON(mt, mas.index != 10);
2299 MT_BUG_ON(mt, mas.last != 15);
2300
2301 val = mas_prev(&mas, 0);
2302 MT_BUG_ON(mt, val != xa_mk_value(0));
2303 MT_BUG_ON(mt, mas.index != 0);
2304 MT_BUG_ON(mt, mas.last != 5);
2305
2306 val = mas_prev(&mas, 0);
2307 MT_BUG_ON(mt, val != NULL);
2308 MT_BUG_ON(mt, mas.index != 0);
2309 MT_BUG_ON(mt, mas.last != 5);
2310 MT_BUG_ON(mt, !mas_is_underflow(&mas));
2311
2312 mas.index = 0;
2313 mas.last = 5;
2314 mas_store(&mas, NULL);
2315 mas_reset(&mas);
2316 mas_set(&mas, 10);
2317 mas_walk(&mas);
2318
2319 val = mas_prev(&mas, 0);
2320 MT_BUG_ON(mt, val != NULL);
2321 MT_BUG_ON(mt, mas.index != 0);
2322 MT_BUG_ON(mt, mas.last != 9);
2323 mas_unlock(&mas);
2324
2325 mtree_destroy(mt);
2326
2327 mt_init(mt);
2328 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2329 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2330 rcu_read_lock();
2331 mas_set(&mas, 5);
2332 val = mas_prev(&mas, 4);
2333 MT_BUG_ON(mt, val != NULL);
2334 rcu_read_unlock();
2335 }
2336
2339 /* Test spanning writes that require balancing right sibling or right cousin */
2340 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2341 {
2343 unsigned long i, nr_entries = 1000;
2344
2345 for (i = 0; i <= nr_entries; i++)
2346 mtree_store_range(mt, i*10, i*10 + 5,
2347 xa_mk_value(i), GFP_KERNEL);
2348
2349
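/*
 * Entries cover [0, 10005]; wiping [9365, 9955] in a single store
 * spans several leaf nodes near the right edge, forcing a rebalance
 * against the right sibling or right cousin.
 */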
2350 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2351 }
2352
2353 static noinline void __init check_fuzzer(struct maple_tree *mt)
2354 {
2355 /*
2356 * 1. Causes a spanning rebalance of a single root node.
2357 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2358 * entire right side is consumed.
2359 */
2360 mtree_test_insert(mt, 88, (void *)0xb1);
2361 mtree_test_insert(mt, 84, (void *)0xa9);
2362 mtree_test_insert(mt, 2, (void *)0x5);
2363 mtree_test_insert(mt, 4, (void *)0x9);
2364 mtree_test_insert(mt, 14, (void *)0x1d);
2365 mtree_test_insert(mt, 7, (void *)0xf);
2366 mtree_test_insert(mt, 12, (void *)0x19);
2367 mtree_test_insert(mt, 18, (void *)0x25);
2368 mtree_test_store_range(mt, 8, 18, (void *)0x11);
2369 mtree_destroy(mt);
2370
2372 /*
2373 * 2. Causes a spanning rebalance of two nodes in root.
2374 * Fixed by setting mast->r->max correctly.
2375 */
2376 mt_init_flags(mt, 0);
2377 mtree_test_store(mt, 87, (void *)0xaf);
2378 mtree_test_store(mt, 0, (void *)0x1);
2379 mtree_test_load(mt, 4);
2380 mtree_test_insert(mt, 4, (void *)0x9);
2381 mtree_test_store(mt, 8, (void *)0x11);
2382 mtree_test_store(mt, 44, (void *)0x59);
2383 mtree_test_store(mt, 68, (void *)0x89);
2384 mtree_test_store(mt, 2, (void *)0x5);
2385 mtree_test_insert(mt, 43, (void *)0x57);
2386 mtree_test_insert(mt, 24, (void *)0x31);
2387 mtree_test_insert(mt, 844, (void *)0x699);
2388 mtree_test_store(mt, 84, (void *)0xa9);
2389 mtree_test_store(mt, 4, (void *)0x9);
2390 mtree_test_erase(mt, 4);
2391 mtree_test_load(mt, 5);
2392 mtree_test_erase(mt, 0);
2393 mtree_destroy(mt);
2394
2395 /*
2396 * 3. Causes a node overflow on copy.
2397 * Fixed by using the correct check for node size in mas_wr_modify().
2398 * Also discovered an issue with metadata setting.
2399 */
2400 mt_init_flags(mt, 0);
2401 mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2402 mtree_test_store(mt, 4, (void *)0x9);
2403 mtree_test_erase(mt, 5);
2404 mtree_test_erase(mt, 0);
2405 mtree_test_erase(mt, 4);
2406 mtree_test_store(mt, 5, (void *)0xb);
2407 mtree_test_erase(mt, 5);
2408 mtree_test_store(mt, 5, (void *)0xb);
2409 mtree_test_erase(mt, 5);
2410 mtree_test_erase(mt, 4);
2411 mtree_test_store(mt, 4, (void *)0x9);
2412 mtree_test_store(mt, 444, (void *)0x379);
2413 mtree_test_store(mt, 0, (void *)0x1);
2414 mtree_test_load(mt, 0);
2415 mtree_test_store(mt, 5, (void *)0xb);
2416 mtree_test_erase(mt, 0);
2417 mtree_destroy(mt);
2418
2419 /*
2420 * 4. Spanning store failure due to writing an incorrect pivot value
2421 * at the last slot.
2422 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes().
2424 */
2425 mt_init_flags(mt, 0);
2426 mtree_test_insert(mt, 261, (void *)0x20b);
2427 mtree_test_store(mt, 516, (void *)0x409);
2428 mtree_test_store(mt, 6, (void *)0xd);
2429 mtree_test_insert(mt, 5, (void *)0xb);
2430 mtree_test_insert(mt, 1256, (void *)0x9d1);
2431 mtree_test_store(mt, 4, (void *)0x9);
2432 mtree_test_erase(mt, 1);
2433 mtree_test_store(mt, 56, (void *)0x71);
2434 mtree_test_insert(mt, 1, (void *)0x3);
2435 mtree_test_store(mt, 24, (void *)0x31);
2436 mtree_test_erase(mt, 1);
2437 mtree_test_insert(mt, 2263, (void *)0x11af);
2438 mtree_test_insert(mt, 446, (void *)0x37d);
2439 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2440 mtree_test_store_range(mt, 3, 446, (void *)0x7);
2441 mtree_destroy(mt);
2442
2443 /*
2444 * 5. mas_wr_extend_null() may overflow slots.
2445 * Fix by checking against wr_mas->node_end.
2446 */
2447 mt_init_flags(mt, 0);
2448 mtree_test_store(mt, 48, (void *)0x61);
2449 mtree_test_store(mt, 3, (void *)0x7);
2450 mtree_test_load(mt, 0);
2451 mtree_test_store(mt, 88, (void *)0xb1);
2452 mtree_test_store(mt, 81, (void *)0xa3);
2453 mtree_test_insert(mt, 0, (void *)0x1);
2454 mtree_test_insert(mt, 8, (void *)0x11);
2455 mtree_test_insert(mt, 4, (void *)0x9);
2456 mtree_test_insert(mt, 2480, (void *)0x1361);
2457 mtree_test_insert(mt, ULONG_MAX,
2458 (void *)0xffffffffffffffff);
2459 mtree_test_erase(mt, ULONG_MAX);
2460 mtree_destroy(mt);
2461
2462 /*
2463 * 6. When reusing a node with an implied pivot and the node is
2464 * shrinking, old data would be left in the implied slot.
2465 * Fixed by checking the last pivot against mas->max and clearing
2466 * accordingly. This only affected the left-most node, as that node is
2467 * the only one allowed to end in NULL.
2468 */
2469 mt_init_flags(mt, 0);
2470 mtree_test_erase(mt, 3);
2471 mtree_test_insert(mt, 22, (void *)0x2d);
2472 mtree_test_insert(mt, 15, (void *)0x1f);
2473 mtree_test_load(mt, 2);
2474 mtree_test_insert(mt, 1, (void *)0x3);
2475 mtree_test_insert(mt, 1, (void *)0x3);
2476 mtree_test_insert(mt, 5, (void *)0xb);
2477 mtree_test_erase(mt, 1);
2478 mtree_test_insert(mt, 1, (void *)0x3);
2479 mtree_test_insert(mt, 4, (void *)0x9);
2480 mtree_test_insert(mt, 1, (void *)0x3);
2481 mtree_test_erase(mt, 1);
2482 mtree_test_insert(mt, 2, (void *)0x5);
2483 mtree_test_insert(mt, 1, (void *)0x3);
2484 mtree_test_erase(mt, 3);
2485 mtree_test_insert(mt, 22, (void *)0x2d);
2486 mtree_test_insert(mt, 15, (void *)0x1f);
2487 mtree_test_insert(mt, 2, (void *)0x5);
2488 mtree_test_insert(mt, 1, (void *)0x3);
2489 mtree_test_insert(mt, 8, (void *)0x11);
2490 mtree_test_load(mt, 2);
2491 mtree_test_insert(mt, 1, (void *)0x3);
2492 mtree_test_insert(mt, 1, (void *)0x3);
2493 mtree_test_store(mt, 1, (void *)0x3);
2494 mtree_test_insert(mt, 5, (void *)0xb);
2495 mtree_test_erase(mt, 1);
2496 mtree_test_insert(mt, 1, (void *)0x3);
2497 mtree_test_insert(mt, 4, (void *)0x9);
2498 mtree_test_insert(mt, 1, (void *)0x3);
2499 mtree_test_erase(mt, 1);
2500 mtree_test_insert(mt, 2, (void *)0x5);
2501 mtree_test_insert(mt, 1, (void *)0x3);
2502 mtree_test_erase(mt, 3);
2503 mtree_test_insert(mt, 22, (void *)0x2d);
2504 mtree_test_insert(mt, 15, (void *)0x1f);
2505 mtree_test_insert(mt, 2, (void *)0x5);
2506 mtree_test_insert(mt, 1, (void *)0x3);
2507 mtree_test_insert(mt, 8, (void *)0x11);
2508 mtree_test_insert(mt, 12, (void *)0x19);
2509 mtree_test_erase(mt, 1);
2510 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2511 mtree_test_erase(mt, 62);
2512 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2513 mtree_test_insert(mt, 11, (void *)0x17);
2514 mtree_test_insert(mt, 3, (void *)0x7);
2515 mtree_test_insert(mt, 3, (void *)0x7);
2516 mtree_test_store(mt, 62, (void *)0x7d);
2517 mtree_test_erase(mt, 62);
2518 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2519 mtree_test_erase(mt, 1);
2520 mtree_test_insert(mt, 22, (void *)0x2d);
2521 mtree_test_insert(mt, 12, (void *)0x19);
2522 mtree_test_erase(mt, 1);
2523 mtree_test_insert(mt, 3, (void *)0x7);
2524 mtree_test_store(mt, 62, (void *)0x7d);
2525 mtree_test_erase(mt, 62);
2526 mtree_test_insert(mt, 122, (void *)0xf5);
2527 mtree_test_store(mt, 3, (void *)0x7);
2528 mtree_test_insert(mt, 0, (void *)0x1);
2529 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2530 mtree_test_insert(mt, 85, (void *)0xab);
2531 mtree_test_insert(mt, 72, (void *)0x91);
2532 mtree_test_insert(mt, 81, (void *)0xa3);
2533 mtree_test_insert(mt, 726, (void *)0x5ad);
2534 mtree_test_insert(mt, 0, (void *)0x1);
2535 mtree_test_insert(mt, 1, (void *)0x3);
2536 mtree_test_store(mt, 51, (void *)0x67);
2537 mtree_test_insert(mt, 611, (void *)0x4c7);
2538 mtree_test_insert(mt, 485, (void *)0x3cb);
2539 mtree_test_insert(mt, 1, (void *)0x3);
2540 mtree_test_erase(mt, 1);
2541 mtree_test_insert(mt, 0, (void *)0x1);
2542 mtree_test_insert(mt, 1, (void *)0x3);
2543 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2544 mtree_test_load(mt, 1);
2545 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2546 mtree_test_insert(mt, 1, (void *)0x3);
2547 mtree_test_erase(mt, 1);
2548 mtree_test_load(mt, 53);
2549 mtree_test_load(mt, 1);
2550 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2551 mtree_test_insert(mt, 222, (void *)0x1bd);
2552 mtree_test_insert(mt, 485, (void *)0x3cb);
2553 mtree_test_insert(mt, 1, (void *)0x3);
2554 mtree_test_erase(mt, 1);
2555 mtree_test_load(mt, 0);
2556 mtree_test_insert(mt, 21, (void *)0x2b);
2557 mtree_test_insert(mt, 3, (void *)0x7);
2558 mtree_test_store(mt, 621, (void *)0x4db);
2559 mtree_test_insert(mt, 0, (void *)0x1);
2560 mtree_test_erase(mt, 5);
2561 mtree_test_insert(mt, 1, (void *)0x3);
2562 mtree_test_store(mt, 62, (void *)0x7d);
2563 mtree_test_erase(mt, 62);
2564 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2565 mtree_test_insert(mt, 22, (void *)0x2d);
2566 mtree_test_insert(mt, 12, (void *)0x19);
2567 mtree_test_erase(mt, 1);
2568 mtree_test_insert(mt, 1, (void *)0x3);
2569 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2570 mtree_test_erase(mt, 62);
2571 mtree_test_erase(mt, 1);
2572 mtree_test_load(mt, 1);
2573 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2574 mtree_test_insert(mt, 1, (void *)0x3);
2575 mtree_test_erase(mt, 1);
2576 mtree_test_load(mt, 53);
2577 mtree_test_load(mt, 1);
2578 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2579 mtree_test_insert(mt, 222, (void *)0x1bd);
2580 mtree_test_insert(mt, 485, (void *)0x3cb);
2581 mtree_test_insert(mt, 1, (void *)0x3);
2582 mtree_test_erase(mt, 1);
2583 mtree_test_insert(mt, 1, (void *)0x3);
2584 mtree_test_load(mt, 0);
2585 mtree_test_load(mt, 0);
2586 mtree_destroy(mt);
2587
2588 /*
2589 * 7. The previous fix was incomplete; fix mas_reuse_node() clearing of
2590 * old data by overwriting it first - that way metadata is of no concern.
2591 */
2592 mt_init_flags(mt, 0);
2593 mtree_test_load(mt, 1);
2594 mtree_test_insert(mt, 102, (void *)0xcd);
2595 mtree_test_erase(mt, 2);
2596 mtree_test_erase(mt, 0);
2597 mtree_test_load(mt, 0);
2598 mtree_test_insert(mt, 4, (void *)0x9);
2599 mtree_test_insert(mt, 2, (void *)0x5);
2600 mtree_test_insert(mt, 110, (void *)0xdd);
2601 mtree_test_insert(mt, 1, (void *)0x3);
2602 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2603 mtree_test_erase(mt, 2);
2604 mtree_test_store(mt, 0, (void *)0x1);
2605 mtree_test_store(mt, 112, (void *)0xe1);
2606 mtree_test_insert(mt, 21, (void *)0x2b);
2607 mtree_test_store(mt, 1, (void *)0x3);
2608 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2609 mtree_test_store(mt, 2, (void *)0x5);
2610 mtree_test_load(mt, 22);
2611 mtree_test_erase(mt, 2);
2612 mtree_test_store(mt, 210, (void *)0x1a5);
2613 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2614 mtree_test_store(mt, 2, (void *)0x5);
2615 mtree_test_erase(mt, 2);
2616 mtree_test_erase(mt, 22);
2617 mtree_test_erase(mt, 1);
2618 mtree_test_erase(mt, 2);
2619 mtree_test_store(mt, 0, (void *)0x1);
2620 mtree_test_load(mt, 112);
2621 mtree_test_insert(mt, 2, (void *)0x5);
2622 mtree_test_erase(mt, 2);
2623 mtree_test_store(mt, 1, (void *)0x3);
2624 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2625 mtree_test_erase(mt, 0);
2626 mtree_test_erase(mt, 2);
2627 mtree_test_store(mt, 2, (void *)0x5);
2628 mtree_test_erase(mt, 0);
2629 mtree_test_erase(mt, 2);
2630 mtree_test_store(mt, 0, (void *)0x1);
2631 mtree_test_store(mt, 0, (void *)0x1);
2632 mtree_test_erase(mt, 2);
2633 mtree_test_store(mt, 2, (void *)0x5);
2634 mtree_test_erase(mt, 2);
2635 mtree_test_insert(mt, 2, (void *)0x5);
2636 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2637 mtree_test_erase(mt, 0);
2638 mtree_test_erase(mt, 2);
2639 mtree_test_store(mt, 0, (void *)0x1);
2640 mtree_test_load(mt, 112);
2641 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2642 mtree_test_store(mt, 2, (void *)0x5);
2643 mtree_test_load(mt, 110);
2644 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2645 mtree_test_load(mt, 2);
2646 mtree_test_store(mt, 2, (void *)0x5);
2647 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2648 mtree_test_erase(mt, 12);
2649 mtree_test_store(mt, 2, (void *)0x5);
2650 mtree_test_load(mt, 22);
2651 mtree_destroy(mt);
2652
2654 /*
2655 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2656 * may be set incorrectly to the final pivot and not the right max.
2657 * Fix by setting the left max to orig right max if the entire node is
2658 * consumed.
2659 */
2660 mt_init_flags(mt, 0);
2661 mtree_test_store(mt, 6, (void *)0xd);
2662 mtree_test_store(mt, 67, (void *)0x87);
2663 mtree_test_insert(mt, 15, (void *)0x1f);
2664 mtree_test_insert(mt, 6716, (void *)0x3479);
2665 mtree_test_store(mt, 61, (void *)0x7b);
2666 mtree_test_insert(mt, 13, (void *)0x1b);
2667 mtree_test_store(mt, 8, (void *)0x11);
2668 mtree_test_insert(mt, 1, (void *)0x3);
2669 mtree_test_load(mt, 0);
2670 mtree_test_erase(mt, 67167);
2671 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2672 mtree_test_insert(mt, 6, (void *)0xd);
2673 mtree_test_erase(mt, 67);
2674 mtree_test_insert(mt, 1, (void *)0x3);
2675 mtree_test_erase(mt, 667167);
2676 mtree_test_insert(mt, 6, (void *)0xd);
2677 mtree_test_store(mt, 67, (void *)0x87);
2678 mtree_test_insert(mt, 5, (void *)0xb);
2679 mtree_test_erase(mt, 1);
2680 mtree_test_insert(mt, 6, (void *)0xd);
2681 mtree_test_erase(mt, 67);
2682 mtree_test_insert(mt, 15, (void *)0x1f);
2683 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2684 mtree_test_insert(mt, 1, (void *)0x3);
2685 mtree_test_load(mt, 7);
2686 mtree_test_insert(mt, 16, (void *)0x21);
2687 mtree_test_insert(mt, 36, (void *)0x49);
2688 mtree_test_store(mt, 67, (void *)0x87);
2689 mtree_test_store(mt, 6, (void *)0xd);
2690 mtree_test_insert(mt, 367, (void *)0x2df);
2691 mtree_test_insert(mt, 115, (void *)0xe7);
2692 mtree_test_store(mt, 0, (void *)0x1);
2693 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2694 mtree_test_store(mt, 1, (void *)0x3);
2695 mtree_test_erase(mt, 67167);
2696 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2697 mtree_test_store(mt, 1, (void *)0x3);
2698 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2699 mtree_test_load(mt, 67);
2700 mtree_test_insert(mt, 1, (void *)0x3);
2701 mtree_test_erase(mt, 67167);
2702 mtree_destroy(mt);
2703
2704 /*
2705 * 9. A spanning store to the end of data caused an invalid metadata
2706 * length, which eventually resulted in a crash.
2707 * Fix by checking if there is a value in pivot before incrementing the
2708 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2709 * abstract the two locations this happens into a function called
2710 * mas_leaf_set_meta().
2711 */
2712 mt_init_flags(mt, 0);
2713 mtree_test_insert(mt, 21, (void *)0x2b);
2714 mtree_test_insert(mt, 12, (void *)0x19);
2715 mtree_test_insert(mt, 6, (void *)0xd);
2716 mtree_test_insert(mt, 8, (void *)0x11);
2717 mtree_test_insert(mt, 2, (void *)0x5);
2718 mtree_test_insert(mt, 91, (void *)0xb7);
2719 mtree_test_insert(mt, 18, (void *)0x25);
2720 mtree_test_insert(mt, 81, (void *)0xa3);
2721 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2722 mtree_test_store(mt, 1, (void *)0x3);
2723 mtree_test_erase(mt, 8);
2724 mtree_test_insert(mt, 11, (void *)0x17);
2725 mtree_test_insert(mt, 8, (void *)0x11);
2726 mtree_test_insert(mt, 21, (void *)0x2b);
2727 mtree_test_insert(mt, 2, (void *)0x5);
2728 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2729 mtree_test_erase(mt, ULONG_MAX - 10);
2730 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2731 mtree_test_erase(mt, 2);
2732 mtree_test_insert(mt, 1211, (void *)0x977);
2733 mtree_test_insert(mt, 111, (void *)0xdf);
2734 mtree_test_insert(mt, 13, (void *)0x1b);
2735 mtree_test_insert(mt, 211, (void *)0x1a7);
2736 mtree_test_insert(mt, 11, (void *)0x17);
2737 mtree_test_insert(mt, 5, (void *)0xb);
2738 mtree_test_insert(mt, 1218, (void *)0x985);
2739 mtree_test_insert(mt, 61, (void *)0x7b);
2740 mtree_test_store(mt, 1, (void *)0x3);
2741 mtree_test_insert(mt, 121, (void *)0xf3);
2742 mtree_test_insert(mt, 8, (void *)0x11);
2743 mtree_test_insert(mt, 21, (void *)0x2b);
2744 mtree_test_insert(mt, 2, (void *)0x5);
2745 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2746 mtree_test_erase(mt, ULONG_MAX - 10);
2747 }
2748
2749 /* Duplicate the tree with a specific gap. */
2750 static noinline void __init check_dup_gaps(struct maple_tree *mt,
2751 unsigned long nr_entries, bool zero_start,
2752 unsigned long gap)
2753 {
2754 unsigned long i = 0;
2755 struct maple_tree newmt;
2756 int ret;
2757 void *tmp;
2758 MA_STATE(mas, mt, 0, 0);
2759 MA_STATE(newmas, &newmt, 0, 0);
2760 struct rw_semaphore newmt_lock;
2761
2762 init_rwsem(&newmt_lock);
2763 mt_set_external_lock(&newmt, &newmt_lock);
2764
2765 if (!zero_start)
2766 i = 1;
2767
2768 mt_zero_nr_tallocated();
2769 for (; i <= nr_entries; i++)
2770 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2771 xa_mk_value(i), GFP_KERNEL);
2772
2773 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2774 mt_set_non_kernel(99999);
2775 down_write(&newmt_lock);
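/*
 * Preallocate nodes for a bulk insert of nr_entries so the plain
 * mas_store() calls below need no allocations while copying.
 */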
2776 ret = mas_expected_entries(&newmas, nr_entries);
2777 mt_set_non_kernel(0);
2778 MT_BUG_ON(mt, ret != 0);
2779
2780 rcu_read_lock();
2781 mas_for_each(&mas, tmp, ULONG_MAX) {
2782 newmas.index = mas.index;
2783 newmas.last = mas.last;
2784 mas_store(&newmas, tmp);
2785 }
2786 rcu_read_unlock();
2787 mas_destroy(&newmas);
2788
2789 __mt_destroy(&newmt);
2790 up_write(&newmt_lock);
2791 }
2792
2793 /* Duplicate many sizes of trees. Mainly to test expected entry values. */
2794 static noinline void __init check_dup(struct maple_tree *mt)
2795 {
2796 int i;
2797 int big_start = 100010;
2798
2799 /* Check with a value at zero */
2800 for (i = 10; i < 1000; i++) {
2801 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2802 check_dup_gaps(mt, i, true, 5);
2803 mtree_destroy(mt);
2804 rcu_barrier();
2805 }
2806
2807 cond_resched();
2808 mt_cache_shrink();
2809 /* Check with a value at zero, no gap */
2810 for (i = 1000; i < 2000; i++) {
2811 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2812 check_dup_gaps(mt, i, true, 0);
2813 mtree_destroy(mt);
2814 rcu_barrier();
2815 }
2816
2817 cond_resched();
2818 mt_cache_shrink();
2819 /* Check with a value at zero and unreasonably large */
2820 for (i = big_start; i < big_start + 10; i++) {
2821 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2822 check_dup_gaps(mt, i, true, 5);
2823 mtree_destroy(mt);
2824 rcu_barrier();
2825 }
2826
2827 cond_resched();
2828 mt_cache_shrink();
2829 /* Small to medium size not starting at zero */
2830 for (i = 200; i < 1000; i++) {
2831 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2832 check_dup_gaps(mt, i, false, 5);
2833 mtree_destroy(mt);
2834 rcu_barrier();
2835 }
2836
2837 cond_resched();
2838 mt_cache_shrink();
2839 /* Unreasonably large not starting at zero */
2840 for (i = big_start; i < big_start + 10; i++) {
2841 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2842 check_dup_gaps(mt, i, false, 5);
2843 mtree_destroy(mt);
2844 rcu_barrier();
2845 cond_resched();
2846 mt_cache_shrink();
2847 }
2848
2849 /* Check non-allocation tree not starting at zero */
2850 for (i = 1500; i < 3000; i++) {
2851 mt_init_flags(mt, 0);
2852 check_dup_gaps(mt, i, false, 5);
2853 mtree_destroy(mt);
2854 rcu_barrier();
2855 cond_resched();
2856 if (i % 2 == 0)
2857 mt_cache_shrink();
2858 }
2859
2860 mt_cache_shrink();
2861 /* Check non-allocation tree starting at zero */
2862 for (i = 200; i < 1000; i++) {
2863 mt_init_flags(mt, 0);
2864 check_dup_gaps(mt, i, true, 5);
2865 mtree_destroy(mt);
2866 rcu_barrier();
2867 cond_resched();
2868 }
2869
2870 mt_cache_shrink();
2871 /* Unreasonably large */
2872 for (i = big_start + 5; i < big_start + 10; i++) {
2873 mt_init_flags(mt, 0);
2874 check_dup_gaps(mt, i, true, 5);
2875 mtree_destroy(mt);
2876 rcu_barrier();
2877 mt_cache_shrink();
2878 cond_resched();
2879 }
2880 }
2881
2882 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2883 {
2884 int i = 50;
2885 MA_STATE(mas, mt, 0, 0);
2886
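/*
 * Build 51 adjacent ranges covering [0, 509], then store a NULL
 * spanning [240, 509] so the spanning rebalance is left with close to
 * the minimum amount of data in the big node.
 */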
2887 mt_set_non_kernel(9999);
2888 mas_lock(&mas);
2889 do {
2890 mas_set_range(&mas, i*10, i*10+9);
2891 mas_store(&mas, check_bnode_min_spanning);
2892 } while (i--);
2893
2894 mas_set_range(&mas, 240, 509);
2895 mas_store(&mas, NULL);
2896 mas_unlock(&mas);
2897 mas_destroy(&mas);
2898 mt_set_non_kernel(0);
2899 }
2900
2901 static noinline void __init check_empty_area_window(struct maple_tree *mt)
2902 {
2903 unsigned long i, nr_entries = 20;
2904 MA_STATE(mas, mt, 0, 0);
2905
2906 for (i = 1; i <= nr_entries; i++)
2907 mtree_store_range(mt, i*10, i*10 + 9,
2908 xa_mk_value(i), GFP_KERNEL);
2909
2910 /* Create another hole besides the one at 0 */
2911 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
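/* The gaps are now [0, 9], [160, 169] and everything above 209. */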
2912
2913 /* Check lower bounds that don't fit */
2914 rcu_read_lock();
2915 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2916
2917 mas_reset(&mas);
2918 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2919
2920 /* Check lower bound that does fit */
2921 mas_reset(&mas);
2922 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2923 MT_BUG_ON(mt, mas.index != 5);
2924 MT_BUG_ON(mt, mas.last != 9);
2925 rcu_read_unlock();
2926
2927 /* Check one gap that doesn't fit and one that does */
2928 rcu_read_lock();
2929 mas_reset(&mas);
2930 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2931 MT_BUG_ON(mt, mas.index != 161);
2932 MT_BUG_ON(mt, mas.last != 169);
2933
2934 /* Check one gap that does fit above the min */
2935 mas_reset(&mas);
2936 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2937 MT_BUG_ON(mt, mas.index != 216);
2938 MT_BUG_ON(mt, mas.last != 218);
2939
2940 /* Check size that doesn't fit any gap */
2941 mas_reset(&mas);
2942 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2943
2944 /*
2945 * Check size that doesn't fit the lower end of the window but
2946 * does fit the gap
2947 */
2948 mas_reset(&mas);
2949 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2950
2951 /*
2952 * Check size that doesn't fit the upper end of the window but
2953 * does fit the gap
2954 */
2955 mas_reset(&mas);
2956 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2957
2958 /* Check mas_empty_area forward */
2959 mas_reset(&mas);
2960 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2961 MT_BUG_ON(mt, mas.index != 0);
2962 MT_BUG_ON(mt, mas.last != 8);
2963
2964 mas_reset(&mas);
2965 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2966 MT_BUG_ON(mt, mas.index != 0);
2967 MT_BUG_ON(mt, mas.last != 3);
2968
2969 mas_reset(&mas);
2970 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2971
2972 mas_reset(&mas);
2973 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2974
2975 mas_reset(&mas);
2976 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
2977
2978 mas_reset(&mas);
2979 mas_empty_area(&mas, 100, 165, 3);
2980
2981 mas_reset(&mas);
2982 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2983 rcu_read_unlock();
2984 }
2985
2986 static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2987 {
2988 const unsigned long max = 0x25D78000;
2989 unsigned long size;
2990 int loop, shift;
2991 MA_STATE(mas, mt, 0, 0);
2992
2993 mt_set_non_kernel(99999);
2994 for (shift = 12; shift <= 16; shift++) {
2995 loop = 5000;
2996 size = 1 << shift;
2997 while (loop--) {
2998 mas_set(&mas, 0);
2999 mas_lock(&mas);
3000 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
3001 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
3002 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
3003 mas_unlock(&mas);
3004 mas_reset(&mas);
3005 }
3006 }
3007
3008 /* No space left. */
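/*
 * 5000 allocations at each size from 1 << 12 to 1 << 16 sum to
 * exactly 0x25D78000 == max, so every slot below max is taken.
 */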
3009 size = 0x1000;
3010 rcu_read_lock();
3011 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
3012 rcu_read_unlock();
3013
3014 /* Fill a depth 3 node to the maximum */
3015 for (unsigned long i = 629440511; i <= 629440800; i += 6)
3016 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
3017 /* Make space in the second-last depth 4 node */
3018 mtree_erase(mt, 631668735);
3019 /* Make space in the last depth 4 node */
3020 mtree_erase(mt, 629506047);
3021 mas_reset(&mas);
3022 /* Search from just after the gap in the second-last depth 4 */
3023 rcu_read_lock();
3024 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
3025 rcu_read_unlock();
3026 mt_set_non_kernel(0);
3027 }
3028
3029 /*
3030 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
3031 *
3032 * The table below shows the single entry tree (0-0 pointer) and normal tree
3033 * with nodes.
3034 *
3035 * Function ENTRY Start Result index & last
3036 * ┬ ┬ ┬ ┬ ┬
3037 * │ │ │ │ └─ the final range
3038 * │ │ │ └─ The node value after execution
3039 * │ │ └─ The node value before execution
3040 * │ └─ If the entry exists or does not exist (DNE)
3041 * └─ The function name
3042 *
3043 * Function ENTRY Start Result index & last
3044 * mas_next()
3045 * - after last
3046 * Single entry tree at 0-0
3047 * ------------------------
3048 * DNE MAS_START MAS_NONE 1 - oo
3049 * DNE MAS_PAUSE MAS_NONE 1 - oo
3050 * DNE MAS_ROOT MAS_NONE 1 - oo
3051 * when index = 0
3052 * DNE MAS_NONE MAS_ROOT 0
3053 * when index > 0
3054 * DNE MAS_NONE MAS_NONE 1 - oo
3055 *
3056 * Normal tree
3057 * -----------
3058 * exists MAS_START active range
3059 * DNE MAS_START active set to last range
3060 * exists MAS_PAUSE active range
3061 * DNE MAS_PAUSE active set to last range
3062 * exists MAS_NONE active range
3063 * exists active active range
3064 * DNE active active set to last range
3065 * ERANGE active MAS_OVERFLOW last range
3066 *
3067 * Function ENTRY Start Result index & last
3068 * mas_prev()
3069 * - before index
3070 * Single entry tree at 0-0
3071 * ------------------------
3072 * if index > 0
3073 * exists MAS_START MAS_ROOT 0
3074 * exists MAS_PAUSE MAS_ROOT 0
3075 * exists MAS_NONE MAS_ROOT 0
3076 *
3077 * if index == 0
3078 * DNE MAS_START MAS_NONE 0
3079 * DNE MAS_PAUSE MAS_NONE 0
3080 * DNE MAS_NONE MAS_NONE 0
3081 * DNE MAS_ROOT MAS_NONE 0
3082 *
3083 * Normal tree
3084 * -----------
3085 * exists MAS_START active range
3086 * DNE MAS_START active set to min
3087 * exists MAS_PAUSE active range
3088 * DNE MAS_PAUSE active set to min
3089 * exists MAS_NONE active range
3090 * DNE MAS_NONE MAS_NONE set to min
3091 * any MAS_ROOT MAS_NONE 0
3092 * exists active active range
3093 * DNE active active last range
3094 * ERANGE active MAS_UNDERFLOW last range
3095 *
3096 * Function ENTRY Start Result index & last
3097 * mas_find()
3098 * - at index or next
3099 * Single entry tree at 0-0
3100 * ------------------------
3101 * if index > 0
3102 * DNE MAS_START MAS_NONE 0
3103 * DNE MAS_PAUSE MAS_NONE 0
3104 * DNE MAS_ROOT MAS_NONE 0
3105 * DNE MAS_NONE MAS_NONE 1
3106 * if index == 0
3107 * exists MAS_START MAS_ROOT 0
3108 * exists MAS_PAUSE MAS_ROOT 0
3109 * exists MAS_NONE MAS_ROOT 0
3110 *
3111 * Normal tree
3112 * -----------
3113 * exists MAS_START active range
3114 * DNE MAS_START active set to max
3115 * exists MAS_PAUSE active range
3116 * DNE MAS_PAUSE active set to max
3117 * exists MAS_NONE active range (start at last)
3118 * exists active active range
3119 * DNE active active last range (max < last)
3120 *
3121 * Function ENTRY Start Result index & last
3122 * mas_find_rev()
3123 * - at index or before
3124 * Single entry tree at 0-0
3125 * ------------------------
3126 * if index > 0
3127 * exists MAS_START MAS_ROOT 0
3128 * exists MAS_PAUSE MAS_ROOT 0
3129 * exists MAS_NONE MAS_ROOT 0
3130 * if index == 0
3131 * DNE MAS_START MAS_NONE 0
3132 * DNE MAS_PAUSE MAS_NONE 0
3133 * DNE MAS_NONE MAS_NONE 0
3134 * DNE MAS_ROOT MAS_NONE 0
3135 *
3136 * Normal tree
3137 * -----------
3138 * exists MAS_START active range
3139 * DNE MAS_START active set to min
3140 * exists MAS_PAUSE active range
3141 * DNE MAS_PAUSE active set to min
3142 * exists MAS_NONE active range (start at index)
3143 * exists active active range
3144 * DNE active active last range (min > index)
3145 *
3146 * Function ENTRY Start Result index & last
3147 * mas_walk()
3148 * - Look up index
3149 * Single entry tree at 0-0
3150 * ------------------------
3151 * if index > 0
3152 * DNE MAS_START MAS_ROOT 1 - oo
3153 * DNE MAS_PAUSE MAS_ROOT 1 - oo
3154 * DNE MAS_NONE MAS_ROOT 1 - oo
3155 * DNE MAS_ROOT MAS_ROOT 1 - oo
3156 * if index == 0
3157 * exists MAS_START MAS_ROOT 0
3158 * exists MAS_PAUSE MAS_ROOT 0
3159 * exists MAS_NONE MAS_ROOT 0
3160 * exists MAS_ROOT MAS_ROOT 0
3161 *
3162 * Normal tree
3163 * -----------
3164 * exists MAS_START active range
3165 * DNE MAS_START active range of NULL
3166 * exists MAS_PAUSE active range
3167 * DNE MAS_PAUSE active range of NULL
3168 * exists MAS_NONE active range
3169 * DNE MAS_NONE active range of NULL
3170 * exists active active range
3171 * DNE active active range of NULL
3172 */
3173
3174 static noinline void __init check_state_handling(struct maple_tree *mt)
3175 {
3176 MA_STATE(mas, mt, 0, 0);
3177 void *entry, *ptr = (void *) 0x1234500;
3178 void *ptr2 = &ptr;
3179 void *ptr3 = &ptr2;
3180
3181 /* Check MAS_ROOT First */
3182 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3183
3184 mas_lock(&mas);
3185 /* prev: Start -> underflow */
3186 entry = mas_prev(&mas, 0);
3187 MT_BUG_ON(mt, entry != NULL);
3188 MT_BUG_ON(mt, mas.status != ma_underflow);
3189
3190 /* prev: Start -> root */
3191 mas_set(&mas, 10);
3192 entry = mas_prev(&mas, 0);
3193 MT_BUG_ON(mt, entry != ptr);
3194 MT_BUG_ON(mt, mas.index != 0);
3195 MT_BUG_ON(mt, mas.last != 0);
3196 MT_BUG_ON(mt, mas.status != ma_root);
3197
3198 /* prev: pause -> root */
3199 mas_set(&mas, 10);
3200 mas_pause(&mas);
3201 entry = mas_prev(&mas, 0);
3202 MT_BUG_ON(mt, entry != ptr);
3203 MT_BUG_ON(mt, mas.index != 0);
3204 MT_BUG_ON(mt, mas.last != 0);
3205 MT_BUG_ON(mt, mas.status != ma_root);
3206
3207 /* next: start -> none */
3208 mas_set(&mas, 0);
3209 entry = mas_next(&mas, ULONG_MAX);
3210 MT_BUG_ON(mt, mas.index != 1);
3211 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3212 MT_BUG_ON(mt, entry != NULL);
3213 MT_BUG_ON(mt, mas.status != ma_none);
3214
3215 /* next: start -> none */
3216 mas_set(&mas, 10);
3217 entry = mas_next(&mas, ULONG_MAX);
3218 MT_BUG_ON(mt, mas.index != 1);
3219 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3220 MT_BUG_ON(mt, entry != NULL);
3221 MT_BUG_ON(mt, mas.status != ma_none);
3222
3223 /* find: start -> root */
3224 mas_set(&mas, 0);
3225 entry = mas_find(&mas, ULONG_MAX);
3226 MT_BUG_ON(mt, entry != ptr);
3227 MT_BUG_ON(mt, mas.index != 0);
3228 MT_BUG_ON(mt, mas.last != 0);
3229 MT_BUG_ON(mt, mas.status != ma_root);
3230
3231 /* find: root -> none */
3232 entry = mas_find(&mas, ULONG_MAX);
3233 MT_BUG_ON(mt, entry != NULL);
3234 MT_BUG_ON(mt, mas.index != 1);
3235 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3236 MT_BUG_ON(mt, mas.status != ma_none);
3237
3238 /* find: none -> none */
3239 entry = mas_find(&mas, ULONG_MAX);
3240 MT_BUG_ON(mt, entry != NULL);
3241 MT_BUG_ON(mt, mas.index != 1);
3242 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3243 MT_BUG_ON(mt, mas.status != ma_none);
3244
3245 /* find: start -> none */
3246 mas_set(&mas, 10);
3247 entry = mas_find(&mas, ULONG_MAX);
3248 MT_BUG_ON(mt, entry != NULL);
3249 MT_BUG_ON(mt, mas.index != 1);
3250 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3251 MT_BUG_ON(mt, mas.status != ma_none);
3252
3253 /* find_rev: none -> root */
3254 entry = mas_find_rev(&mas, 0);
3255 MT_BUG_ON(mt, entry != ptr);
3256 MT_BUG_ON(mt, mas.index != 0);
3257 MT_BUG_ON(mt, mas.last != 0);
3258 MT_BUG_ON(mt, mas.status != ma_root);
3259
3260 /* find_rev: start -> root */
3261 mas_set(&mas, 0);
3262 entry = mas_find_rev(&mas, 0);
3263 MT_BUG_ON(mt, entry != ptr);
3264 MT_BUG_ON(mt, mas.index != 0);
3265 MT_BUG_ON(mt, mas.last != 0);
3266 MT_BUG_ON(mt, mas.status != ma_root);
3267
3268 /* find_rev: root -> none */
3269 entry = mas_find_rev(&mas, 0);
3270 MT_BUG_ON(mt, entry != NULL);
3271 MT_BUG_ON(mt, mas.index != 0);
3272 MT_BUG_ON(mt, mas.last != 0);
3273 MT_BUG_ON(mt, mas.status != ma_none);
3274
3275 /* find_rev: none -> none */
3276 entry = mas_find_rev(&mas, 0);
3277 MT_BUG_ON(mt, entry != NULL);
3278 MT_BUG_ON(mt, mas.index != 0);
3279 MT_BUG_ON(mt, mas.last != 0);
3280 MT_BUG_ON(mt, mas.status != ma_none);
3281
3282 /* find_rev: start -> root */
3283 mas_set(&mas, 10);
3284 entry = mas_find_rev(&mas, 0);
3285 MT_BUG_ON(mt, entry != ptr);
3286 MT_BUG_ON(mt, mas.index != 0);
3287 MT_BUG_ON(mt, mas.last != 0);
3288 MT_BUG_ON(mt, mas.status != ma_root);
3289
3290 /* walk: start -> none */
3291 mas_set(&mas, 10);
3292 entry = mas_walk(&mas);
3293 MT_BUG_ON(mt, entry != NULL);
3294 MT_BUG_ON(mt, mas.index != 1);
3295 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3296 MT_BUG_ON(mt, mas.status != ma_none);
3297
3298 /* walk: pause -> none */
3299 mas_set(&mas, 10);
3300 mas_pause(&mas);
3301 entry = mas_walk(&mas);
3302 MT_BUG_ON(mt, entry != NULL);
3303 MT_BUG_ON(mt, mas.index != 1);
3304 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3305 MT_BUG_ON(mt, mas.status != ma_none);
3306
3307 /* walk: none -> none */
3308 mas.index = mas.last = 10;
3309 entry = mas_walk(&mas);
3310 MT_BUG_ON(mt, entry != NULL);
3311 MT_BUG_ON(mt, mas.index != 1);
3312 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3313 MT_BUG_ON(mt, mas.status != ma_none);
3314
3315 /* walk: none -> none */
3316 entry = mas_walk(&mas);
3317 MT_BUG_ON(mt, entry != NULL);
3318 MT_BUG_ON(mt, mas.index != 1);
3319 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3320 MT_BUG_ON(mt, mas.status != ma_none);
3321
3322 /* walk: start -> root */
3323 mas_set(&mas, 0);
3324 entry = mas_walk(&mas);
3325 MT_BUG_ON(mt, entry != ptr);
3326 MT_BUG_ON(mt, mas.index != 0);
3327 MT_BUG_ON(mt, mas.last != 0);
3328 MT_BUG_ON(mt, mas.status != ma_root);
3329
3330 /* walk: pause -> root */
3331 mas_set(&mas, 0);
3332 mas_pause(&mas);
3333 entry = mas_walk(&mas);
3334 MT_BUG_ON(mt, entry != ptr);
3335 MT_BUG_ON(mt, mas.index != 0);
3336 MT_BUG_ON(mt, mas.last != 0);
3337 MT_BUG_ON(mt, mas.status != ma_root);
3338
3339 /* walk: none -> root */
3340 mas.status = ma_none;
3341 entry = mas_walk(&mas);
3342 MT_BUG_ON(mt, entry != ptr);
3343 MT_BUG_ON(mt, mas.index != 0);
3344 MT_BUG_ON(mt, mas.last != 0);
3345 MT_BUG_ON(mt, mas.status != ma_root);
3346
3347 /* walk: root -> root */
3348 entry = mas_walk(&mas);
3349 MT_BUG_ON(mt, entry != ptr);
3350 MT_BUG_ON(mt, mas.index != 0);
3351 MT_BUG_ON(mt, mas.last != 0);
3352 MT_BUG_ON(mt, mas.status != ma_root);
3353
3354 /* walk: root -> none */
3355 mas_set(&mas, 10);
3356 entry = mas_walk(&mas);
3357 MT_BUG_ON(mt, entry != NULL);
3358 MT_BUG_ON(mt, mas.index != 1);
3359 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3360 MT_BUG_ON(mt, mas.status != ma_none);
3361
3362 /* walk: none -> root */
3363 mas.index = mas.last = 0;
3364 entry = mas_walk(&mas);
3365 MT_BUG_ON(mt, entry != ptr);
3366 MT_BUG_ON(mt, mas.index != 0);
3367 MT_BUG_ON(mt, mas.last != 0);
3368 MT_BUG_ON(mt, mas.status != ma_root);
3369
3370 mas_unlock(&mas);
3371
3372 /* Check when there is an actual node */
3373 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3374 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3375 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3376 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3377
3378 mas_lock(&mas);
3379
3380 /* next: start ->active */
3381 mas_set(&mas, 0);
3382 entry = mas_next(&mas, ULONG_MAX);
3383 MT_BUG_ON(mt, entry != ptr);
3384 MT_BUG_ON(mt, mas.index != 0x1000);
3385 MT_BUG_ON(mt, mas.last != 0x1500);
3386 MT_BUG_ON(mt, !mas_is_active(&mas));
3387
3388 /* next: pause ->active */
3389 mas_set(&mas, 0);
3390 mas_pause(&mas);
3391 entry = mas_next(&mas, ULONG_MAX);
3392 MT_BUG_ON(mt, entry != ptr);
3393 MT_BUG_ON(mt, mas.index != 0x1000);
3394 MT_BUG_ON(mt, mas.last != 0x1500);
3395 MT_BUG_ON(mt, !mas_is_active(&mas));
3396
3397 /* next: none ->active */
3398 mas.index = mas.last = 0;
3399 mas.offset = 0;
3400 mas.status = ma_none;
3401 entry = mas_next(&mas, ULONG_MAX);
3402 MT_BUG_ON(mt, entry != ptr);
3403 MT_BUG_ON(mt, mas.index != 0x1000);
3404 MT_BUG_ON(mt, mas.last != 0x1500);
3405 MT_BUG_ON(mt, !mas_is_active(&mas));
3406
3407 /* next:active ->active (spanning limit) */
3408 entry = mas_next(&mas, 0x2100);
3409 MT_BUG_ON(mt, entry != ptr2);
3410 MT_BUG_ON(mt, mas.index != 0x2000);
3411 MT_BUG_ON(mt, mas.last != 0x2500);
3412 MT_BUG_ON(mt, !mas_is_active(&mas));
3413
3414 /* next:active -> overflow (limit reached) beyond data */
3415 entry = mas_next(&mas, 0x2999);
3416 MT_BUG_ON(mt, entry != NULL);
3417 MT_BUG_ON(mt, mas.index != 0x2501);
3418 MT_BUG_ON(mt, mas.last != 0x2fff);
3419 MT_BUG_ON(mt, !mas_is_overflow(&mas));
3420
3421 /* next:overflow -> active (limit changed) */
3422 entry = mas_next(&mas, ULONG_MAX);
3423 MT_BUG_ON(mt, entry != ptr3);
3424 MT_BUG_ON(mt, mas.index != 0x3000);
3425 MT_BUG_ON(mt, mas.last != 0x3500);
3426 MT_BUG_ON(mt, !mas_is_active(&mas));
3427
3428 /* next:active -> overflow (limit reached) */
3429 entry = mas_next(&mas, ULONG_MAX);
3430 MT_BUG_ON(mt, entry != NULL);
3431 MT_BUG_ON(mt, mas.index != 0x3501);
3432 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3433 MT_BUG_ON(mt, !mas_is_overflow(&mas));
3434
3435 /* next:overflow -> overflow */
3436 entry = mas_next(&mas, ULONG_MAX);
3437 MT_BUG_ON(mt, entry != NULL);
3438 MT_BUG_ON(mt, mas.index != 0x3501);
3439 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3440 MT_BUG_ON(mt, !mas_is_overflow(&mas));
3441
3442 /* prev:overflow -> active */
3443 entry = mas_prev(&mas, 0);
3444 MT_BUG_ON(mt, entry != ptr3);
3445 MT_BUG_ON(mt, mas.index != 0x3000);
3446 MT_BUG_ON(mt, mas.last != 0x3500);
3447 MT_BUG_ON(mt, !mas_is_active(&mas));
3448
3449 /* next: none -> active, skip value at location */
3450 mas_set(&mas, 0);
3451 entry = mas_next(&mas, ULONG_MAX);
3452 mas.status = ma_none;
3453 mas.offset = 0;
3454 entry = mas_next(&mas, ULONG_MAX);
3455 MT_BUG_ON(mt, entry != ptr2);
3456 MT_BUG_ON(mt, mas.index != 0x2000);
3457 MT_BUG_ON(mt, mas.last != 0x2500);
3458 MT_BUG_ON(mt, !mas_is_active(&mas));
3459
3460 /* prev:active ->active */
3461 entry = mas_prev(&mas, 0);
3462 MT_BUG_ON(mt, entry != ptr);
3463 MT_BUG_ON(mt, mas.index != 0x1000);
3464 MT_BUG_ON(mt, mas.last != 0x1500);
3465 MT_BUG_ON(mt, !mas_is_active(&mas));
3466
3467 /* prev:active -> underflow (span limit) */
3468 mas_next(&mas, ULONG_MAX);
3469 entry = mas_prev(&mas, 0x1200);
3470 MT_BUG_ON(mt, entry != ptr);
3471 MT_BUG_ON(mt, mas.index != 0x1000);
3472 MT_BUG_ON(mt, mas.last != 0x1500);
3473 MT_BUG_ON(mt, !mas_is_active(&mas)); /* spanning limit */
3474 entry = mas_prev(&mas, 0x1200); /* underflow */
3475 MT_BUG_ON(mt, entry != NULL);
3476 MT_BUG_ON(mt, mas.index != 0x1000);
3477 MT_BUG_ON(mt, mas.last != 0x1500);
3478 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3479
3480 /* prev:underflow -> underflow (lower limit) spanning end range */
3481 entry = mas_prev(&mas, 0x0100);
3482 MT_BUG_ON(mt, entry != NULL);
3483 MT_BUG_ON(mt, mas.index != 0);
3484 MT_BUG_ON(mt, mas.last != 0x0FFF);
3485 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3486
3487 /* prev:underflow -> underflow */
3488 entry = mas_prev(&mas, 0);
3489 MT_BUG_ON(mt, entry != NULL);
3490 MT_BUG_ON(mt, mas.index != 0);
3491 MT_BUG_ON(mt, mas.last != 0x0FFF);
3492 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3493
3494 /* prev:underflow -> underflow */
3495 entry = mas_prev(&mas, 0);
3496 MT_BUG_ON(mt, entry != NULL);
3497 MT_BUG_ON(mt, mas.index != 0);
3498 MT_BUG_ON(mt, mas.last != 0x0FFF);
3499 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3500
3501 /* next:underflow -> active */
3502 entry = mas_next(&mas, ULONG_MAX);
3503 MT_BUG_ON(mt, entry != ptr);
3504 MT_BUG_ON(mt, mas.index != 0x1000);
3505 MT_BUG_ON(mt, mas.last != 0x1500);
3506 MT_BUG_ON(mt, !mas_is_active(&mas));
3507
3508 /* prev:first value -> underflow */
3509 entry = mas_prev(&mas, 0x1000);
3510 MT_BUG_ON(mt, entry != NULL);
3511 MT_BUG_ON(mt, mas.index != 0x1000);
3512 MT_BUG_ON(mt, mas.last != 0x1500);
3513 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3514
3515 /* find:underflow -> first value */
3516 entry = mas_find(&mas, ULONG_MAX);
3517 MT_BUG_ON(mt, entry != ptr);
3518 MT_BUG_ON(mt, mas.index != 0x1000);
3519 MT_BUG_ON(mt, mas.last != 0x1500);
3520 MT_BUG_ON(mt, !mas_is_active(&mas));
3521
3522 /* prev: pause ->active */
3523 mas_set(&mas, 0x3600);
3524 entry = mas_prev(&mas, 0);
3525 MT_BUG_ON(mt, entry != ptr3);
3526 mas_pause(&mas);
3527 entry = mas_prev(&mas, 0);
3528 MT_BUG_ON(mt, entry != ptr2);
3529 MT_BUG_ON(mt, mas.index != 0x2000);
3530 MT_BUG_ON(mt, mas.last != 0x2500);
3531 MT_BUG_ON(mt, !mas_is_active(&mas));
3532
3533 /* prev:active -> underflow spanning min */
3534 entry = mas_prev(&mas, 0x1600);
3535 MT_BUG_ON(mt, entry != NULL);
3536 MT_BUG_ON(mt, mas.index != 0x1501);
3537 MT_BUG_ON(mt, mas.last != 0x1FFF);
3538 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3539
3540 /* prev: active ->active, continue */
3541 entry = mas_prev(&mas, 0);
3542 MT_BUG_ON(mt, entry != ptr);
3543 MT_BUG_ON(mt, mas.index != 0x1000);
3544 MT_BUG_ON(mt, mas.last != 0x1500);
3545 MT_BUG_ON(mt, !mas_is_active(&mas));
3546
3547 /* find: start ->active */
3548 mas_set(&mas, 0);
3549 entry = mas_find(&mas, ULONG_MAX);
3550 MT_BUG_ON(mt, entry != ptr);
3551 MT_BUG_ON(mt, mas.index != 0x1000);
3552 MT_BUG_ON(mt, mas.last != 0x1500);
3553 MT_BUG_ON(mt, !mas_is_active(&mas));
3554
3555 /* find: pause ->active */
3556 mas_set(&mas, 0);
3557 mas_pause(&mas);
3558 entry = mas_find(&mas, ULONG_MAX);
3559 MT_BUG_ON(mt, entry != ptr);
3560 MT_BUG_ON(mt, mas.index != 0x1000);
3561 MT_BUG_ON(mt, mas.last != 0x1500);
3562 MT_BUG_ON(mt, !mas_is_active(&mas));
3563
3564 /* find: start ->active on value */
3565 mas_set(&mas, 1200);
3566 entry = mas_find(&mas, ULONG_MAX);
3567 MT_BUG_ON(mt, entry != ptr);
3568 MT_BUG_ON(mt, mas.index != 0x1000);
3569 MT_BUG_ON(mt, mas.last != 0x1500);
3570 MT_BUG_ON(mt, !mas_is_active(&mas));
3571
3572 /* find:active ->active */
3573 entry = mas_find(&mas, ULONG_MAX);
3574 MT_BUG_ON(mt, entry != ptr2);
3575 MT_BUG_ON(mt, mas.index != 0x2000);
3576 MT_BUG_ON(mt, mas.last != 0x2500);
3577 MT_BUG_ON(mt, !mas_is_active(&mas));
3578
3580 /* find:active -> active (NULL) */
3581 entry = mas_find(&mas, 0x2700);
3582 MT_BUG_ON(mt, entry != NULL);
3583 MT_BUG_ON(mt, mas.index != 0x2501);
3584 MT_BUG_ON(mt, mas.last != 0x2FFF);
3585 MAS_BUG_ON(&mas, !mas_is_active(&mas));
3586
3587 /* find: overflow ->active */
3588 entry = mas_find(&mas, 0x5000);
3589 MT_BUG_ON(mt, entry != ptr3);
3590 MT_BUG_ON(mt, mas.index != 0x3000);
3591 MT_BUG_ON(mt, mas.last != 0x3500);
3592 MT_BUG_ON(mt, !mas_is_active(&mas));
3593
3594 /* find:active -> active (NULL) end*/
3595 entry = mas_find(&mas, ULONG_MAX);
3596 MT_BUG_ON(mt, entry != NULL);
3597 MT_BUG_ON(mt, mas.index != 0x3501);
3598 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3599 MAS_BUG_ON(&mas, !mas_is_active(&mas));
3600
3601 /* find_rev: active (END) ->active */
3602 entry = mas_find_rev(&mas, 0);
3603 MT_BUG_ON(mt, entry != ptr3);
3604 MT_BUG_ON(mt, mas.index != 0x3000);
3605 MT_BUG_ON(mt, mas.last != 0x3500);
3606 MT_BUG_ON(mt, !mas_is_active(&mas));
3607
3608 /* find_rev:active ->active */
3609 entry = mas_find_rev(&mas, 0);
3610 MT_BUG_ON(mt, entry != ptr2);
3611 MT_BUG_ON(mt, mas.index != 0x2000);
3612 MT_BUG_ON(mt, mas.last != 0x2500);
3613 MT_BUG_ON(mt, !mas_is_active(&mas));
3614
3615 /* find_rev: pause ->active */
3616 mas_pause(&mas);
3617 entry = mas_find_rev(&mas, 0);
3618 MT_BUG_ON(mt, entry != ptr);
3619 MT_BUG_ON(mt, mas.index != 0x1000);
3620 MT_BUG_ON(mt, mas.last != 0x1500);
3621 MT_BUG_ON(mt, !mas_is_active(&mas));
3622
3623 /* find_rev:active -> underflow */
3624 entry = mas_find_rev(&mas, 0);
3625 MT_BUG_ON(mt, entry != NULL);
3626 MT_BUG_ON(mt, mas.index != 0);
3627 MT_BUG_ON(mt, mas.last != 0x0FFF);
3628 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3629
3630 /* find_rev: start ->active */
3631 mas_set(&mas, 0x1200);
3632 entry = mas_find_rev(&mas, 0);
3633 MT_BUG_ON(mt, entry != ptr);
3634 MT_BUG_ON(mt, mas.index != 0x1000);
3635 MT_BUG_ON(mt, mas.last != 0x1500);
3636 MT_BUG_ON(mt, !mas_is_active(&mas));
3637
3638 /* mas_walk start ->active */
3639 mas_set(&mas, 0x1200);
3640 entry = mas_walk(&mas);
3641 MT_BUG_ON(mt, entry != ptr);
3642 MT_BUG_ON(mt, mas.index != 0x1000);
3643 MT_BUG_ON(mt, mas.last != 0x1500);
3644 MT_BUG_ON(mt, !mas_is_active(&mas));
3645
3646 /* mas_walk start -> active (range of NULL) */
3647 mas_set(&mas, 0x1600);
3648 entry = mas_walk(&mas);
3649 MT_BUG_ON(mt, entry != NULL);
3650 MT_BUG_ON(mt, mas.index != 0x1501);
3651 MT_BUG_ON(mt, mas.last != 0x1fff);
3652 MT_BUG_ON(mt, !mas_is_active(&mas));
3653
3654 /* mas_walk pause ->active */
3655 mas_set(&mas, 0x1200);
3656 mas_pause(&mas);
3657 entry = mas_walk(&mas);
3658 MT_BUG_ON(mt, entry != ptr);
3659 MT_BUG_ON(mt, mas.index != 0x1000);
3660 MT_BUG_ON(mt, mas.last != 0x1500);
3661 MT_BUG_ON(mt, !mas_is_active(&mas));
3662
3663 /* mas_walk pause -> active */
3664 mas_set(&mas, 0x1600);
3665 mas_pause(&mas);
3666 entry = mas_walk(&mas);
3667 MT_BUG_ON(mt, entry != NULL);
3668 MT_BUG_ON(mt, mas.index != 0x1501);
3669 MT_BUG_ON(mt, mas.last != 0x1fff);
3670 MT_BUG_ON(mt, !mas_is_active(&mas));
3671
3672 /* mas_walk none -> active */
3673 mas_set(&mas, 0x1200);
3674 mas.status = ma_none;
3675 entry = mas_walk(&mas);
3676 MT_BUG_ON(mt, entry != ptr);
3677 MT_BUG_ON(mt, mas.index != 0x1000);
3678 MT_BUG_ON(mt, mas.last != 0x1500);
3679 MT_BUG_ON(mt, !mas_is_active(&mas));
3680
3681 /* mas_walk none -> active */
3682 mas_set(&mas, 0x1600);
3683 mas.status = ma_none;
3684 entry = mas_walk(&mas);
3685 MT_BUG_ON(mt, entry != NULL);
3686 MT_BUG_ON(mt, mas.index != 0x1501);
3687 MT_BUG_ON(mt, mas.last != 0x1fff);
3688 MT_BUG_ON(mt, !mas_is_active(&mas));
3689
3690 /* mas_walk active -> active */
3691 mas.index = 0x1200;
3692 mas.last = 0x1200;
3693 mas.offset = 0;
3694 entry = mas_walk(&mas);
3695 MT_BUG_ON(mt, entry != ptr);
3696 MT_BUG_ON(mt, mas.index != 0x1000);
3697 MT_BUG_ON(mt, mas.last != 0x1500);
3698 MT_BUG_ON(mt, !mas_is_active(&mas));
3699
3700 /* mas_walk active -> active */
3701 mas.index = 0x1600;
3702 mas.last = 0x1600;
3703 entry = mas_walk(&mas);
3704 MT_BUG_ON(mt, entry != NULL);
3705 MT_BUG_ON(mt, mas.index != 0x1501);
3706 MT_BUG_ON(mt, mas.last != 0x1fff);
3707 MT_BUG_ON(mt, !mas_is_active(&mas));
3708
3709 mas_unlock(&mas);
3710 }
3711
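/*
 * A minimal sketch, not invoked by the tests, of the iteration pattern whose
 * state transitions are validated in check_state_handling() above: under the
 * lock, mas_find() (here via mas_for_each()) moves the state to active and
 * keeps returning entries until the limit is passed. The function name is
 * illustrative only.
 */
static __maybe_unused void __init state_handling_example(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *entry;

	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		/* entry spans [mas.index, mas.last] while the state is active. */
	}
	mas_unlock(&mas);
}
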
/* Exercise cyclic allocation through the advanced and normal interfaces. */
static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
{
	unsigned long location;
	unsigned long next;
	int ret = 0;
	MA_STATE(mas, mt, 0, 0);

	next = 0;
	mtree_lock(mt);
	/* Allocations start at 2, so location == i + 2 and next == i + 3. */
	for (int i = 0; i < 100; i++) {
		mas_alloc_cyclic(&mas, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
		MAS_BUG_ON(&mas, i != location - 2);
		MAS_BUG_ON(&mas, mas.index != location);
		MAS_BUG_ON(&mas, mas.last != location);
		MAS_BUG_ON(&mas, i != next - 3);
	}

	mtree_unlock(mt);
	mtree_destroy(mt);
	next = 0;
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (int i = 0; i < 100; i++) {
		mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
		MT_BUG_ON(mt, i != location - 2);
		MT_BUG_ON(mt, i != next - 3);
		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	}

	mtree_destroy(mt);

	/*
	 * An issue with the reverse search was discovered at
	 * https://lore.kernel.org/all/[email protected]/
	 * Exhausting the allocation area and forcing the search to wrap
	 * requires a mas_reset() in mas_alloc_cyclic().
	 */
	/* Exhaust [2, 1024], then check that erased ids are found on wrap. */
	next = 0;
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (int i = 0; i < 1023; i++) {
		mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
		MT_BUG_ON(mt, i != location - 2);
		MT_BUG_ON(mt, i != next - 3);
		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	}
	mtree_erase(mt, 123);
	MT_BUG_ON(mt, mtree_load(mt, 123) != NULL);
	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
	MT_BUG_ON(mt, 123 != location);
	MT_BUG_ON(mt, 124 != next);
	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	mtree_erase(mt, 100);
	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
	MT_BUG_ON(mt, 100 != location);
	MT_BUG_ON(mt, 101 != next);
	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	mtree_destroy(mt);

	/* Overflow test */
	next = ULONG_MAX - 1;
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 0);
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 0);
	/* next has overflowed, so the third allocation wraps and returns 1. */
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 1);
}

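/*
 * A minimal usage sketch, not invoked by the tests, of the cyclic allocation
 * pattern exercised above; the function and variable names are illustrative
 * only. Ids are handed out in order within [2, 1024] and wrap around,
 * reusing erased slots.
 */
static __maybe_unused int __init alloc_cyclic_example(void)
{
	struct maple_tree id_tree;
	unsigned long id, next = 0;
	int ret;

	mt_init_flags(&id_tree, MT_FLAGS_ALLOC_RANGE);
	ret = mtree_alloc_cyclic(&id_tree, &id, &id_tree, 2, 1024, &next,
				 GFP_KERNEL);
	if (ret < 0)
		return ret;
	/* The first allocation from an empty tree: id == 2, next == 3. */
	mtree_destroy(&id_tree);
	return 0;
}
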
static DEFINE_MTREE(tree);
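/*
 * Module init: run each check_* suite on a freshly initialised tree (or a
 * single benchmark when one of the BENCH_* options is enabled) and report
 * the pass/run counts.
 */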
static int __init maple_tree_seed(void)
{
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002};
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

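	/*
	 * When a BENCH_* option is enabled at the top of this file, only that
	 * benchmark runs and the functional tests below are skipped via the
	 * skip: label.
	 */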
#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_LOAD)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_load(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	bench_forking();
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_PREV)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_prev(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_deficient_node(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_store_null(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	check_forking();

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree); /* Insert 0 */
	check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
	check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr); /* Insert 5003 */
	check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
	check_load(&tree, set[11], NULL); /* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr); /* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree); /* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
	check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */

	/*
	 * The second set of tests tries to load a value that doesn't exist,
	 * inserts a second value, then loads the value again.
	 */
	check_load(&tree, set[1], NULL); /* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr); /* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * 5014 -> ptr, 5015 -> &tree, and NULL everywhere else.
	 */
	check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr); /* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr); /* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 * set[] = {5015, 5014, 5017, 25, 1000,
	 *	    1001, 1002, 1003, 1005, 0,
	 *	    5003, 5002};
	 */

	check_insert(&tree, set[0], ptr); /* 5015 */
	check_insert(&tree, set[1], &tree); /* 5014 */
	check_insert(&tree, set[2], ptr); /* 5017 */
	check_insert(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr); /* 1000 - should cause a split */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree); /* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr); /* 1002 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree); /* 1003 */
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr); /* 1005 */

	check_insert(&tree, set[9], &tree); /* 0 */

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	alloc_cyclic_testing(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void __exit maple_tree_harvest(void)
{

}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <[email protected]>");
MODULE_DESCRIPTION("maple tree API test module");
MODULE_LICENSE("GPL");