Lines Matching +full:mm +full:- +full:0

// SPDX-License-Identifier: GPL-2.0-or-later

#include "generated/bit-length.h"
#include "maple-shared.h"
#include "../../../mm/vma.h"

        (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))

 * provides userland-equivalent functionality for everything vma.c uses.

#include "../../../mm/vma.c"

        } while (0)
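/*
 * The ASSERT_*() helpers used throughout these tests are ordinary userland
 * macros rather than kernel facilities. A minimal sketch of the shape such a
 * macro takes (the name ASSERT_EQ_SKETCH and the exact message are
 * illustrative assumptions, not the harness's actual definition):
 */
#include <stdbool.h>
#include <stdio.h>

#define ASSERT_EQ_SKETCH(a, b)                                          \
        do {                                                            \
                if ((a) != (b)) {                                       \
                        fprintf(stderr, "Assert FAILED at %s:%d: %s != %s\n", \
                                __FILE__, __LINE__, #a, #b);            \
                        return false;   /* tests return bool */         \
                }                                                       \
        } while (0)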
        return (unsigned long)-1;
static struct vm_area_struct *alloc_vma(struct mm_struct *mm,
        struct vm_area_struct *ret = vm_area_alloc(mm);

        ret->vm_start = start;
        ret->vm_end = end;
        ret->vm_pgoff = pgoff;
        ret->__vm_flags = flags;
static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm,
        struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, flags);

        if (vma_link(mm, vma)) {

        vma->vm_lock_seq = UINT_MAX;
        vmg->next = vma_next(vmg->vmi);
        vmg->prev = vma_prev(vmg->vmi);
        vma_iter_next_range(vmg->vmi);
        vma_iter_set(vmg->vmi, start);

        vmg->prev = NULL;
        vmg->next = NULL;
        vmg->vma = NULL;

        vmg->start = start;
        vmg->end = end;
        vmg->pgoff = pgoff;
        vmg->flags = flags;
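/*
 * Usage note (inferred from the callers below, not part of the matched
 * lines): start/end are byte addresses and pgoff counts 4 KiB pages, so a
 * range [0x3000, 0x5000) backed from page offset 3 is described as:
 *
 *      vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
 */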
static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
                ASSERT_EQ(vmg->state, VMA_MERGE_SUCCESS);
        ASSERT_EQ(vmg->state, VMA_MERGE_NOMERGE);

        return alloc_and_link_vma(mm, start, end, pgoff, flags);
static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
        int count = 0;

        vma_iter_set(vmi, 0);

        mtree_destroy(&mm->mm_mt);
        mm->map_count = 0;
        int seq = vma->vm_lock_seq;

        vma->vm_lock_seq = UINT_MAX;

        return seq > -1;
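/*
 * Reading vma_write_started() above: vm_lock_seq is reset to UINT_MAX, which
 * reads back through an int as -1, so "seq > -1" is true only if something
 * (assumed to be the harness's vma_start_write() stub) bumped the sequence
 * since the previous check.
 */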
/* Helper function providing a dummy vm_ops->close() method. */
        struct mm_struct mm = {};
        struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, flags);
        struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, flags);
        VMA_ITERATOR(vmi, &mm, 0x1000);
                .mm = &mm,
                .start = 0x1000,
                .end = 0x2000,

        ASSERT_FALSE(vma_link(&mm, vma_left));
        ASSERT_FALSE(vma_link(&mm, vma_right));

        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x3000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(vma->vm_flags, flags);

        mtree_destroy(&mm.mm_mt);
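/*
 * Layout exercised by test_simple_merge() above (derived from the asserts):
 *
 *      0x0000      0x1000      0x2000      0x3000
 *      |-vma_left--|   gap     |-vma_right-|
 *
 * Merging a new [0x1000, 0x2000) mapping into the gap produces a single VMA
 * spanning [0, 0x3000) with pgoff 0.
 */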
        struct mm_struct mm = {};
        struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, flags);
        VMA_ITERATOR(vmi, &mm, 0x1000);

        ASSERT_FALSE(vma_link(&mm, init_vma));

                        0x1000, 0x2000, VM_READ | VM_MAYREAD);

        ASSERT_EQ(vma->vm_start, 0x1000);
        ASSERT_EQ(vma->vm_end, 0x2000);
        ASSERT_EQ(vma->vm_pgoff, 1);

        vma_iter_set(&vmi, 0);

        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x1000);
        ASSERT_EQ(vma->vm_pgoff, 0);

        ASSERT_EQ(vma->vm_start, 0x1000);
        ASSERT_EQ(vma->vm_end, 0x2000);
        ASSERT_EQ(vma->vm_pgoff, 1);

        ASSERT_EQ(vma->vm_start, 0x2000);
        ASSERT_EQ(vma->vm_end, 0x3000);
        ASSERT_EQ(vma->vm_pgoff, 2);

        mtree_destroy(&mm.mm_mt);
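/*
 * test_simple_modify() above splits the initial [0, 0x3000) VMA into three:
 * [0, 0x1000) pgoff 0, the modified [0x1000, 0x2000) piece with pgoff 1 and
 * VM_READ | VM_MAYREAD, and [0x2000, 0x3000) pgoff 2, which the three assert
 * groups walk in order.
 */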
        struct mm_struct mm = {};
        struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, flags);
        VMA_ITERATOR(vmi, &mm, 0);
                .start = 0,
                .end = 0x3000,
                .pgoff = 0,

        ASSERT_FALSE(vma_link(&mm, vma));

        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x3000);
        ASSERT_EQ(vma->vm_pgoff, 0);

        mtree_destroy(&mm.mm_mt);
        struct mm_struct mm = {};
        struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, flags);
        VMA_ITERATOR(vmi, &mm, 0);

        ASSERT_FALSE(vma_link(&mm, vma));

        ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));

        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x1000);
        ASSERT_EQ(vma->vm_pgoff, 0);

        mtree_destroy(&mm.mm_mt);
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,

        vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
        INIT_LIST_HEAD(&vma_a->anon_vma_chain);
        list_add(&dummy_anon_vma_chain_a.same_vma, &vma_a->anon_vma_chain);

        vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
        INIT_LIST_HEAD(&vma_b->anon_vma_chain);
        list_add(&dummy_anon_vma_chain_b.same_vma, &vma_b->anon_vma_chain);

        vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, flags);
        INIT_LIST_HEAD(&vma_c->anon_vma_chain);
        list_add(&dummy_anon_vma_chain_c.same_vma, &vma_c->anon_vma_chain);

        vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, flags, &merged);
        INIT_LIST_HEAD(&vma_d->anon_vma_chain);
        list_add(&dummy_anon_vma_chain_d.same_vma, &vma_d->anon_vma_chain);
        ASSERT_EQ(mm.map_count, 4);

        vma_a->vm_ops = &vm_ops; /* This should have no impact. */
        vma_b->anon_vma = &dummy_anon_vma;
        vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged);
        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x4000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 3);

        vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, flags, &merged);
        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x5000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 3);

        vma_d->anon_vma = &dummy_anon_vma;
        vma_d->vm_ops = &vm_ops; /* This should have no impact. */
        vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, flags, &merged);
        ASSERT_EQ(vma->vm_start, 0x6000);
        ASSERT_EQ(vma->vm_end, 0x9000);
        ASSERT_EQ(vma->vm_pgoff, 6);
        ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 3);

        vma_d->vm_ops = NULL; /* This would otherwise degrade the merge. */
        vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, flags, &merged);
        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x9000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 2);

        vma_c->anon_vma = &dummy_anon_vma;
        vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, flags, &merged);
        ASSERT_EQ(vma->vm_start, 0xa000);
        ASSERT_EQ(vma->vm_end, 0xc000);
        ASSERT_EQ(vma->vm_pgoff, 0xa);
        ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 2);

        vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, flags, &merged);
        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0xc000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 1);

        count = 0;
        vma_iter_set(&vmi, 0);
        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0xc000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);

        mtree_destroy(&mm.mm_mt);
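/*
 * The merges above all hinge on flags matching and the file offset running
 * linearly across the join. A self-contained sketch of that contiguity rule
 * (a simplification for illustration; the kernel's real predicate also
 * checks vm_file, vm_ops and anon_vma compatibility):
 */
#include <stdbool.h>

struct range_sketch { unsigned long start, end, pgoff, flags; };

static bool can_merge_after_sketch(const struct range_sketch *prev,
                                   const struct range_sketch *next)
{
        if (prev->end != next->start)           /* must be byte-adjacent */
                return false;
        if (prev->flags != next->flags)         /* flags must match exactly */
                return false;
        /* pgoff (in 4 KiB pages, assumed) must continue where prev ends. */
        return prev->pgoff + ((prev->end - prev->start) >> 12) == next->pgoff;
}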
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,
        vm_flags_t all_special_flags = 0;

        for (i = 0; i < ARRAY_SIZE(special_flags); i++) {

        vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);

        vmg_set_range(&vmg, 0x3000, 0x4000, 3, flags);
        for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
                vma_left->__vm_flags = flags | special_flag;

        vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);

        for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
                vma_left->__vm_flags = flags | special_flag;

        cleanup_mm(&mm, &vmi);
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,

         * vm_ops->close() hook.

         * has a vm_ops->close() callback that will need to be called when

         * vm_ops->close:    -       -     !NULL

         * vm_ops->close:    -     !NULL

         *                   -     !NULL    NULL

         * Cannot occur, because vma->vm_ops being the same implies the same
         * vma->vm_file, and therefore this would mean that next->vm_ops->close

         * is one where both the previous and next VMAs are merged - in this

         * ->

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
        vma_next->vm_ops = &vm_ops;

        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x5000);
        ASSERT_EQ(vma_prev->vm_pgoff, 0);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma->vm_ops = &vm_ops;

        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);

        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
        vma->vm_ops = &vm_ops;

        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
        vma->vm_ops = &vm_ops;

        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);

         * ->

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
        vma_next->vm_ops = &vm_ops;

        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x5000);
        ASSERT_EQ(vma_prev->vm_pgoff, 0);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,
        struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
        struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, flags);

         * surrounding VMAs have vm_ops->close() hooks (but are otherwise

         *     A v-------v B
         * |-----|       |-----|

         * |------------||-----|

        /* Have prev and next have a vm_ops->close() hook. */
        vma_prev->vm_ops = &vm_ops;
        vma_next->vm_ops = &vm_ops;

        vmg_set_range(&vmg, 0x2000, 0x5000, 2, flags);

        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x5000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(vma->vm_ops, &vm_ops);
        ASSERT_EQ(mm.map_count, 2);

        cleanup_mm(&mm, &vmi);
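/*
 * The point of test_vma_merge_new_with_close() above: the new VMA has no
 * vm_ops->close() of its own, so expanding vma_prev over the gap is safe
 * (nothing with a close() hook gets deleted), while vma_next is assumed to
 * be left unmerged precisely because merging it would discard a VMA whose
 * close() hook still needs to run. Hence the [0, 0x5000) result and a
 * map_count of 2.
 */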
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,

         * Merge right case - partial span.
         *     <->
         * ->

        vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
        vma->vm_ops = &vm_ops; /* This should have no impact. */
        vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags);
        vma_next->vm_ops = &vm_ops; /* This should have no impact. */
        vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags);
        vma->anon_vma = &dummy_anon_vma;

        ASSERT_EQ(vma_next->vm_start, 0x3000);
        ASSERT_EQ(vma_next->vm_end, 0x9000);
        ASSERT_EQ(vma_next->vm_pgoff, 3);
        ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(vma->vm_start, 0x2000);
        ASSERT_EQ(vma->vm_end, 0x3000);
        ASSERT_EQ(vma->vm_pgoff, 2);
        ASSERT_EQ(mm.map_count, 2);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);

         * Merge right case - full span.
         *     <-->
         * ->

        vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
        vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags);
        vma_next->vm_ops = &vm_ops; /* This should have no impact. */
        vmg_set_range(&vmg, 0x2000, 0x6000, 2, flags);
        vma->anon_vma = &dummy_anon_vma;

        ASSERT_EQ(vma_next->vm_start, 0x2000);
        ASSERT_EQ(vma_next->vm_end, 0x9000);
        ASSERT_EQ(vma_next->vm_pgoff, 2);
        ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 1);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);

         * Merge left case - partial span.
         *     <->
         * ->

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
        vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
        vma->vm_ops = &vm_ops; /* This should have no impact. */
        vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags);
        vma->anon_vma = &dummy_anon_vma;

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x6000);
        ASSERT_EQ(vma_prev->vm_pgoff, 0);
        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(vma->vm_start, 0x6000);
        ASSERT_EQ(vma->vm_end, 0x7000);
        ASSERT_EQ(vma->vm_pgoff, 6);
        ASSERT_EQ(mm.map_count, 2);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);

         * Merge left case - full span.
         *     <-->
         * ->

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
        vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
        vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
        vma->anon_vma = &dummy_anon_vma;

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x7000);
        ASSERT_EQ(vma_prev->vm_pgoff, 0);
        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 1);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);

         *     <-->
         * ->

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
        vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);
        vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
        vma->anon_vma = &dummy_anon_vma;

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x9000);
        ASSERT_EQ(vma_prev->vm_pgoff, 0);
        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
        ASSERT_EQ(mm.map_count, 1);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
         * Non-merge ranges. The modified VMA merge operation assumes that the
         * -
         * -
         * -
         *     <->
        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, flags);

        vmg_set_range(&vmg, 0x4000, 0x5000, 4, flags);

        vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags);

        vmg_set_range(&vmg, 0x6000, 0x7000, 6, flags);

        vmg_set_range(&vmg, 0x4000, 0x7000, 4, flags);

        vmg_set_range(&vmg, 0x4000, 0x6000, 4, flags);

        vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
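/*
 * Summary of test_merge_existing() (from the asserts above): a partial-span
 * modification shrinks the middle VMA and grows the neighbour it merges
 * into, a full-span modification deletes the middle VMA outright, and any
 * range that does not reach a compatible neighbour yields VMA_MERGE_NOMERGE
 * with all three VMAs left in place.
 */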
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,

         *     <-->
         * ->

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);

         * merge with the NULL vmg->anon_vma.

        INIT_LIST_HEAD(&vma_prev->anon_vma_chain);
        list_add(&dummy_anon_vma_chain1.same_vma, &vma_prev->anon_vma_chain);
        ASSERT_TRUE(list_is_singular(&vma_prev->anon_vma_chain));
        vma_prev->anon_vma = &dummy_anon_vma;
        ASSERT_TRUE(is_mergeable_anon_vma(NULL, vma_prev->anon_vma, vma_prev));

        INIT_LIST_HEAD(&vma_next->anon_vma_chain);
        list_add(&dummy_anon_vma_chain2.same_vma, &vma_next->anon_vma_chain);
        ASSERT_TRUE(list_is_singular(&vma_next->anon_vma_chain));
        vma_next->anon_vma = (struct anon_vma *)2;
        ASSERT_TRUE(is_mergeable_anon_vma(NULL, vma_next->anon_vma, vma_next));

        ASSERT_FALSE(is_mergeable_anon_vma(vma_prev->anon_vma, vma_next->anon_vma, NULL));

        vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x7000);
        ASSERT_EQ(vma_prev->vm_pgoff, 0);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);

         *     <-->
         * ->

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);

        INIT_LIST_HEAD(&vma_prev->anon_vma_chain);
        list_add(&dummy_anon_vma_chain1.same_vma, &vma_prev->anon_vma_chain);
        vma_prev->anon_vma = (struct anon_vma *)1;

        INIT_LIST_HEAD(&vma_next->anon_vma_chain);
        list_add(&dummy_anon_vma_chain2.same_vma, &vma_next->anon_vma_chain);
        vma_next->anon_vma = (struct anon_vma *)2;

        vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x7000);
        ASSERT_EQ(vma_prev->vm_pgoff, 0);

        ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
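/*
 * The anon_vma rules exercised above: a side that has not faulted (NULL
 * anon_vma in vmg) is compatible with a neighbour whose anon_vma_chain is
 * singular, but two distinct non-NULL anon_vmas never merge with each other,
 * so both scenarios end with vma_prev alone expanding to [0, 0x7000).
 */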
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma_next->anon_vma = &dummy_anon_vma;

        vmg_set_range(&vmg, 0, 0x5000, 0, flags);

        ASSERT_EQ(expand_existing(&vmg), 0);

        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
        ASSERT_TRUE(vma_prev->anon_vma->was_cloned);

        cleanup_mm(&mm, &vmi);

         *         |<----->|
         * |-------*********-------|

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);

        INIT_LIST_HEAD(&vma_next->anon_vma_chain);
        list_add(&dummy_anon_vma_chain.same_vma, &vma_next->anon_vma_chain);

        vma_next->anon_vma = &dummy_anon_vma;
        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x8000);

        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
        ASSERT_TRUE(vma_prev->anon_vma->was_cloned);

        cleanup_mm(&mm, &vmi);

         *         |<----->|
         * |-------*********-------|

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);

        vma->anon_vma = &dummy_anon_vma;
        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x8000);

        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
        ASSERT_TRUE(vma_prev->anon_vma->was_cloned);

        cleanup_mm(&mm, &vmi);

         *         |<----->|
         * |-------*************

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);

        vma->anon_vma = &dummy_anon_vma;
        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma_prev->vm_start, 0);
        ASSERT_EQ(vma_prev->vm_end, 0x5000);

        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
        ASSERT_TRUE(vma_prev->anon_vma->was_cloned);

        cleanup_mm(&mm, &vmi);

         *   |<----->|
         * *************-------|

        vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, flags);
        vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);

        vma->anon_vma = &dummy_anon_vma;
        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma_next->vm_start, 0x3000);
        ASSERT_EQ(vma_next->vm_end, 0x8000);

        ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
        ASSERT_TRUE(vma_next->anon_vma->was_cloned);

        cleanup_mm(&mm, &vmi);
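/*
 * Common thread in test_dup_anon_vma(): whichever VMA survives the merge
 * adopts the disappearing VMA's anon_vma, and was_cloned (assumed to be a
 * harness-only field on the stub struct anon_vma) records that the clone
 * actually ran, which every case above asserts.
 */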
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
                .mm = &mm,

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma->anon_vma = &dummy_anon_vma;

        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);

        cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */

        vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma->anon_vma = &dummy_anon_vma;

        vmg_set_range(&vmg, 0, 0x5000, 3, flags);

        ASSERT_EQ(expand_existing(&vmg), -ENOMEM);

        ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);

        cleanup_mm(&mm, &vmi);
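/*
 * The OOM injection used above comes from the fail_prealloc hook near the
 * top of the file: the vma_iter_prealloc() override returns -ENOMEM when
 * fail_prealloc is set. Note that the asserts still expect vma_prev->anon_vma
 * to be dummy_anon_vma, i.e. the anon_vma duplication is assumed to have
 * happened before the allocation failure aborted the merge.
 */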
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0x1000);

        vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags);
        alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);

         *     <->

        ASSERT_EQ(vma_merge_extend(&vmi, vma, 0x2000), vma);
        ASSERT_EQ(vma->vm_start, 0);
        ASSERT_EQ(vma->vm_end, 0x4000);
        ASSERT_EQ(vma->vm_pgoff, 0);
        ASSERT_EQ(mm.map_count, 1);

        cleanup_mm(&mm, &vmi);
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);

        vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
        vma_new = copy_vma(&vma, 0, 0x2000, 0, &need_locks);

        ASSERT_EQ(vma_new->vm_start, 0);
        ASSERT_EQ(vma_new->vm_end, 0x2000);
        ASSERT_EQ(vma_new->vm_pgoff, 0);

        cleanup_mm(&mm, &vmi);

        vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
        vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, flags);
        vma_new = copy_vma(&vma, 0x4000, 0x2000, 4, &need_locks);

        cleanup_mm(&mm, &vmi);
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
        VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, flags, 5);

        alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);

         * 0x9000.

        vma_iter_set(&vmi, 0x3000);
        vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);

        ASSERT_EQ(vma->vm_start, 0x3000);
        ASSERT_EQ(vma->vm_end, 0x9000);
        ASSERT_EQ(vma->vm_pgoff, 3);
        ASSERT_EQ(vma_iter_addr(&vmi), 0x3000);

        cleanup_mm(&mm, &vmi);
        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);

        current->mm = &mm;

        /* Map at 0x300000, length 0x3000. */
        addr = __mmap_region(NULL, 0x300000, 0x3000,
                        0x300, NULL);
        ASSERT_EQ(addr, 0x300000);

        /* Map at 0x250000, length 0x3000. */
        addr = __mmap_region(NULL, 0x250000, 0x3000,
                        0x250, NULL);
        ASSERT_EQ(addr, 0x250000);

        /* Map at 0x303000, merging to 0x300000 of length 0x6000. */
        addr = __mmap_region(NULL, 0x303000, 0x3000,
                        0x303, NULL);
        ASSERT_EQ(addr, 0x303000);

        /* Map at 0x24d000, merging to 0x250000 of length 0x6000. */
        addr = __mmap_region(NULL, 0x24d000, 0x3000,
                        0x24d, NULL);
        ASSERT_EQ(addr, 0x24d000);

        ASSERT_EQ(mm.map_count, 2);

        if (vma->vm_start == 0x300000) {
                ASSERT_EQ(vma->vm_end, 0x306000);
                ASSERT_EQ(vma->vm_pgoff, 0x300);
        } else if (vma->vm_start == 0x24d000) {
                ASSERT_EQ(vma->vm_end, 0x253000);
                ASSERT_EQ(vma->vm_pgoff, 0x24d);

        cleanup_mm(&mm, &vmi);
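/*
 * Sanity of the pgoff arguments above (assuming 4 KiB pages):
 *
 *      0x300000 >> 12 == 0x300
 *      0x250000 >> 12 == 0x250
 *      0x303000 >> 12 == 0x303
 *      0x24d000 >> 12 == 0x24d
 *
 * so each mapping is offset-contiguous with its neighbour, which is what
 * lets the two pairs merge into [0x300000, 0x306000) and
 * [0x24d000, 0x253000).
 */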
        int num_tests = 0, num_fail = 0;

        } while (0)

                num_tests, num_tests - num_fail, num_fail);

        return num_fail == 0 ? EXIT_SUCCESS : EXIT_FAILURE;
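/*
 * main() tallies per-test failures and exits nonzero if any test failed, so
 * the binary (assumed to be built from tools/testing/vma via its Makefile
 * and run directly) reports overall pass/fail through its exit status.
 */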