Lines Matching full:mm

13 #include "../../../mm/vma.h"
31 #include "../../../mm/vma.c"
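These hits appear to come from the kernel's userland VMA test harness (tools/testing/vma/), whose defining trick is visible in the two includes above: the real mm/vma.c is compiled directly into a userspace program, behind stubbed kernel headers, so that even its static functions can be driven from ordinary test code. A minimal sketch of the pattern; the stub arrangement is an assumption:

	/* Userland unit-test pattern: include the implementation, not just
	 * the header, so static functions in vma.c become callable here.
	 * The harness's stubbed kernel headers (a "vma_internal.h"-style
	 * shim on the include path) must be resolvable first. */
	#include "../../../mm/vma.h"	/* declarations under test */
	#include "../../../mm/vma.c"	/* implementation, same translation unit */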
62 static struct vm_area_struct *alloc_vma(struct mm_struct *mm, in alloc_vma() argument
68 struct vm_area_struct *ret = vm_area_alloc(mm); in alloc_vma()
82 static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm, in alloc_and_link_vma() argument
88 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, flags); in alloc_and_link_vma()
93 if (vma_link(mm, vma)) { in alloc_and_link_vma()
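From the fragments at lines 62-93, the two allocation helpers can be reconstructed with reasonable confidence; everything not visible in the hits (field assignments, error handling) is an assumption:

	static struct vm_area_struct *alloc_vma(struct mm_struct *mm,
			unsigned long start, unsigned long end,
			pgoff_t pgoff, vm_flags_t flags)
	{
		struct vm_area_struct *ret = vm_area_alloc(mm);

		if (ret == NULL)
			return NULL;

		/* Presumed body: install the caller's range and flags. */
		ret->vm_start = start;
		ret->vm_end = end;
		ret->vm_pgoff = pgoff;
		ret->__vm_flags = flags;

		return ret;
	}

	static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm,
			unsigned long start, unsigned long end,
			pgoff_t pgoff, vm_flags_t flags)
	{
		struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, flags);

		if (vma == NULL)
			return NULL;

		/* vma_link() inserts into the mm's maple tree; on failure the
		 * VMA is presumably freed and NULL returned. */
		if (vma_link(mm, vma)) {
			vm_area_free(vma);
			return NULL;
		}

		return vma;
	}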
165 static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm, in try_merge_new_vma() argument
186 return alloc_and_link_vma(mm, start, end, pgoff, flags); in try_merge_new_vma()
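try_merge_new_vma() (lines 165-186) evidently tries the merge path first and only falls back to alloc_and_link_vma() when no merge is possible. A sketch, where the vma_merge_struct field names and the merge_new() wrapper are assumptions based on the sibling hits:

	static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
			struct vma_merge_struct *vmg,
			unsigned long start, unsigned long end,
			pgoff_t pgoff, vm_flags_t flags, bool *was_merged)
	{
		struct vm_area_struct *merged;

		/* Point the merge descriptor at the candidate range. */
		vmg->start = start;
		vmg->end = end;
		vmg->pgoff = pgoff;
		vmg->flags = flags;

		merged = merge_new(vmg);
		if (merged != NULL) {
			*was_merged = true;
			return merged;
		}

		*was_merged = false;
		return alloc_and_link_vma(mm, start, end, pgoff, flags);
	}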
203 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi) in cleanup_mm() argument
217 mtree_destroy(&mm->mm_mt); in cleanup_mm()
218 mm->map_count = 0; in cleanup_mm()
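cleanup_mm() (lines 203-218) resets an mm between test cases; the hits show it destroying the maple tree and zeroing map_count, and the ASSERT_EQ(cleanup_mm(...), N) callers further down imply it returns how many VMAs it freed. A plausible body:

	static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
	{
		struct vm_area_struct *vma;
		int count = 0;

		/* Free every remaining VMA, counting as we go... */
		vma_iter_set(vmi, 0);
		for_each_vma(*vmi, vma) {
			vm_area_free(vma);
			count++;
		}

		/* ...then tear the tree down so the next test starts clean. */
		mtree_destroy(&mm->mm_mt);
		mm->map_count = 0;
		return count;	/* callers assert the expected VMA count */
	}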
243 struct mm_struct mm = {}; in test_simple_merge() local
244 struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, flags); in test_simple_merge()
245 struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, flags); in test_simple_merge()
246 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_merge()
248 .mm = &mm, in test_simple_merge()
256 ASSERT_FALSE(vma_link(&mm, vma_left)); in test_simple_merge()
257 ASSERT_FALSE(vma_link(&mm, vma_right)); in test_simple_merge()
268 mtree_destroy(&mm.mm_mt); in test_simple_merge()
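test_simple_merge (lines 243-268) is the canonical three-way case: [0, 0x1000) with pgoff 0 and [0x2000, 0x3000) with pgoff 2 leave a one-page hole whose only file-contiguous filler is a page at 0x1000 with pgoff 1, so merging it should collapse everything into one VMA. A sketch of the steps between the hits, with the merge_new() wrapper and exact assertions assumed:

	/* vmg (line 248) carries .mm, .vmi and the candidate range
	 * [0x1000, 0x2000) with pgoff 1. */
	vma = merge_new(&vmg);
	ASSERT_NE(vma, NULL);
	ASSERT_EQ(vma->vm_start, 0);		/* spans left, gap and right */
	ASSERT_EQ(vma->vm_end, 0x3000);
	ASSERT_EQ(vma->vm_pgoff, 0);

	vm_area_free(vma);
	/* line 268 then destroys the tree directly, no cleanup_mm() needed */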
277 struct mm_struct mm = {}; in test_simple_modify() local
278 struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, flags); in test_simple_modify()
279 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_modify()
281 ASSERT_FALSE(vma_link(&mm, init_vma)); in test_simple_modify()
328 mtree_destroy(&mm.mm_mt); in test_simple_modify()
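test_simple_modify (lines 277-328) starts from one [0, 0x3000) VMA and exercises the split path: changing protections on the middle page alone should leave three VMAs. A sketch of the core step; vma_modify_flags() matches the mm/vma.h interface, but the flag values are illustrative:

	/* New flags on [0x1000, 0x2000) only: forces a three-way split. */
	vma = vma_modify_flags(&vmi, init_vma, init_vma,
			       0x1000, 0x2000, VM_READ | VM_MAYREAD);
	ASSERT_NE(vma, NULL);
	ASSERT_EQ(vma->vm_start, 0x1000);	/* the re-flagged middle piece */
	ASSERT_EQ(vma->vm_end, 0x2000);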
336 struct mm_struct mm = {}; in test_simple_expand() local
337 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, flags); in test_simple_expand()
338 VMA_ITERATOR(vmi, &mm, 0); in test_simple_expand()
347 ASSERT_FALSE(vma_link(&mm, vma)); in test_simple_expand()
356 mtree_destroy(&mm.mm_mt); in test_simple_expand()
364 struct mm_struct mm = {}; in test_simple_shrink() local
365 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, flags); in test_simple_shrink()
366 VMA_ITERATOR(vmi, &mm, 0); in test_simple_shrink()
368 ASSERT_FALSE(vma_link(&mm, vma)); in test_simple_shrink()
377 mtree_destroy(&mm.mm_mt); in test_simple_shrink()
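test_simple_expand and test_simple_shrink (lines 336-377) are mirror images: one grows [0, 0x1000) to [0, 0x3000), the other trims [0, 0x3000) down to a single page. Plausible cores of the two, with the expand wrapper name assumed; vma_shrink()'s signature is the one exported by mm/vma.h:

	/* Expand: aim the merge descriptor at the existing VMA with a
	 * larger end and no neighbours to consult. */
	struct vma_merge_struct vmg = {
		.vmi = &vmi,
		.vma = vma,
		.start = 0,
		.end = 0x3000,
		.pgoff = 0,
	};
	ASSERT_FALSE(expand_existing(&vmg));
	ASSERT_EQ(vma->vm_end, 0x3000);

	/* Shrink: trim back to [0, 0x1000) in place. */
	ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));
	ASSERT_EQ(vma->vm_end, 0x1000);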
385 struct mm_struct mm = {}; in test_merge_new() local
386 VMA_ITERATOR(vmi, &mm, 0); in test_merge_new()
388 .mm = &mm, in test_merge_new()
414 vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_merge_new()
420 vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags); in test_merge_new()
425 vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, flags); in test_merge_new()
436 vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, flags, &merged); in test_merge_new()
441 ASSERT_EQ(mm.map_count, 4); in test_merge_new()
451 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged); in test_merge_new()
460 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
468 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, flags, &merged); in test_merge_new()
477 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
487 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, flags, &merged); in test_merge_new()
496 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
505 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, flags, &merged); in test_merge_new()
514 ASSERT_EQ(mm.map_count, 2); in test_merge_new()
523 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, flags, &merged); in test_merge_new()
532 ASSERT_EQ(mm.map_count, 2); in test_merge_new()
540 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, flags, &merged); in test_merge_new()
549 ASSERT_EQ(mm.map_count, 1); in test_merge_new()
574 mtree_destroy(&mm.mm_mt); in test_merge_new()
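test_merge_new (lines 385-574) is the exhaustive gap-filling table: starting from islands A = [0, 0x2000), B = [0x3000, 0x4000), C = [0xb000, 0xc000) and later D = [0x7000, 0x9000), it inserts one page at a time, and the map_count assertions above trace the collapse from 4 VMAs down to 1 as neighbours fuse. The step at lines 451-460 is representative; result details assumed:

	/* Fill the A-B hole at [0x2000, 0x3000): pgoff 2 is contiguous with
	 * both, so A, the new page and B should fuse. */
	vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged);
	ASSERT_NE(vma, NULL);
	ASSERT_TRUE(merged);
	ASSERT_EQ(vma->vm_start, 0);
	ASSERT_EQ(vma->vm_end, 0x4000);
	ASSERT_EQ(mm.map_count, 3);	/* 4 VMAs became 3, per line 460 */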
581 struct mm_struct mm = {}; in test_vma_merge_special_flags() local
582 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_special_flags()
584 .mm = &mm, in test_vma_merge_special_flags()
602 vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_special_flags()
632 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags); in test_vma_merge_special_flags()
646 cleanup_mm(&mm, &vmi); in test_vma_merge_special_flags()
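test_vma_merge_special_flags (lines 581-646) checks that VM_SPECIAL bits veto merging even when ranges and pgoffs line up. The expected shape is a loop over the special bits (the set below follows the kernel's VM_SPECIAL definition; the loop itself is an assumption):

	static const vm_flags_t special[] = {
		VM_IO, VM_DONTEXPAND, VM_PFNMAP, VM_MIXEDMAP,
	};
	int i;

	for (i = 0; i < ARRAY_SIZE(special); i++) {
		vma_left->__vm_flags = flags | special[i];
		vmg.flags = flags | special[i];
		/* A special mapping must never merge with a neighbour. */
		ASSERT_EQ(merge_new(&vmg), NULL);
	}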
653 struct mm_struct mm = {}; in test_vma_merge_with_close() local
654 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_with_close()
656 .mm = &mm, in test_vma_merge_with_close()
732 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
733 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
743 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
757 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
758 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
772 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
785 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
786 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
799 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
813 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
814 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
815 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
825 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in test_vma_merge_with_close()
839 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
840 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
841 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
854 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
862 struct mm_struct mm = {}; in test_vma_merge_new_with_close() local
863 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_new_with_close()
865 .mm = &mm, in test_vma_merge_new_with_close()
868 struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_vma_merge_new_with_close()
869 struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, flags); in test_vma_merge_new_with_close()
908 ASSERT_EQ(mm.map_count, 2); in test_vma_merge_new_with_close()
910 cleanup_mm(&mm, &vmi); in test_vma_merge_new_with_close()
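Both *_with_close tests (lines 653-854 and 862-910) hinge on a vm_operations_struct with a .close hook: a VMA whose close() would have to be called cannot be silently deleted by a merge, so the sub-cases above that would remove the hooked VMA assert cleanup_mm() == 3 (merge refused), while the ones that keep it intact assert == 2. The setup is presumably along these lines, with the callback name assumed:

	static void dummy_close(struct vm_area_struct *vma)
	{
	}

	static const struct vm_operations_struct vm_ops = {
		.close = dummy_close,
	};

	/* Mark the middle VMA: merging prev..next across it would require
	 * deleting it, which .close makes impermissible. */
	vma->vm_ops = &vm_ops;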
917 struct mm_struct mm = {}; in test_merge_existing() local
918 VMA_ITERATOR(vmi, &mm, 0); in test_merge_existing()
921 .mm = &mm, in test_merge_existing()
938 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); in test_merge_existing()
940 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); in test_merge_existing()
957 ASSERT_EQ(mm.map_count, 2); in test_merge_existing()
960 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_merge_existing()
972 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); in test_merge_existing()
973 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); in test_merge_existing()
985 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
988 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1000 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
1002 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_merge_existing()
1020 ASSERT_EQ(mm.map_count, 2); in test_merge_existing()
1023 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_merge_existing()
1035 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
1037 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_merge_existing()
1049 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
1052 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1064 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
1066 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_merge_existing()
1067 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); in test_merge_existing()
1079 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
1082 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1099 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
1100 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); in test_merge_existing()
1101 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, flags); in test_merge_existing()
1139 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in test_merge_existing()
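test_merge_existing (lines 917-1139) drives the mprotect()-style path, where a span of an existing VMA is re-flagged and should be absorbed by prev, next, or both: map_count 2 marks the partial cases (a remnant of the middle VMA survives) and map_count 1 the full ones (the middle VMA is consumed entirely). A sketch of the first partial case at lines 938-960; the merge_existing() wrapper and field names are assumptions:

	/* Re-flag only [0x3000, 0x6000), the tail of vma = [0x2000, 0x6000):
	 * it should glue onto vma_next while the head of vma survives. */
	vmg.prev = vma;
	vmg.vma = vma;
	vmg.start = 0x3000;
	vmg.end = 0x6000;
	vmg.pgoff = 3;
	ASSERT_EQ(merge_existing(&vmg), vma_next);
	ASSERT_EQ(vma_next->vm_start, 0x3000);	/* grew leftwards */
	ASSERT_EQ(vma->vm_end, 0x3000);		/* shrank to its head */
	ASSERT_EQ(mm.map_count, 2);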
1147 struct mm_struct mm = {}; in test_anon_vma_non_mergeable() local
1148 VMA_ITERATOR(vmi, &mm, 0); in test_anon_vma_non_mergeable()
1151 .mm = &mm, in test_anon_vma_non_mergeable()
1173 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_anon_vma_non_mergeable()
1174 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_anon_vma_non_mergeable()
1175 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); in test_anon_vma_non_mergeable()
1211 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
1224 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_anon_vma_non_mergeable()
1225 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); in test_anon_vma_non_mergeable()
1247 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
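test_anon_vma_non_mergeable (lines 1147-1247) then shows the anon_vma veto: if prev and next carry different anon_vmas, a three-way merge cannot pick one, so only a partial merge happens and two VMAs remain. Since the mergeability check only compares anon_vma pointers, the harness can presumably get away with distinct placeholder values:

	/* Distinct anon_vmas on either side: never dereferenced here, only
	 * compared, so bogus pointers are assumed to suffice. */
	vma_prev->anon_vma = (struct anon_vma *)1;
	vma_next->anon_vma = (struct anon_vma *)2;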
1255 struct mm_struct mm = {}; in test_dup_anon_vma() local
1256 VMA_ITERATOR(vmi, &mm, 0); in test_dup_anon_vma()
1258 .mm = &mm, in test_dup_anon_vma()
1275 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1276 vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_dup_anon_vma()
1290 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1301 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1302 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_dup_anon_vma()
1303 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); in test_dup_anon_vma()
1323 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1334 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1335 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_dup_anon_vma()
1336 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); in test_dup_anon_vma()
1352 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1363 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1364 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); in test_dup_anon_vma()
1380 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1391 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, flags); in test_dup_anon_vma()
1392 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); in test_dup_anon_vma()
1408 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
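test_dup_anon_vma (lines 1255-1408) checks that when a merge extends one VMA over a range that had its own anon_vma, the survivor ends up with a duplicated (not shared) anon_vma; each sub-case plants the anon_vma on a different participant and verifies where it lands. The shape of one check, with the harness's dummy_anon_vma and its was_cloned tracking field assumed:

	/* Next carries the anon_vma; after prev absorbs next, prev must
	 * hold it too, and the stubbed dup path must have been taken. */
	vma_next->anon_vma = &dummy_anon_vma;
	vmg.vma = vma_prev;
	ASSERT_EQ(expand_existing(&vmg), 0);
	ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
	ASSERT_TRUE(vma_prev->anon_vma->was_cloned);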
1415 struct mm_struct mm = {}; in test_vmi_prealloc_fail() local
1416 VMA_ITERATOR(vmi, &mm, 0); in test_vmi_prealloc_fail()
1418 .mm = &mm, in test_vmi_prealloc_fail()
1429 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vmi_prealloc_fail()
1430 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vmi_prealloc_fail()
1448 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */ in test_vmi_prealloc_fail()
1456 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vmi_prealloc_fail()
1457 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vmi_prealloc_fail()
1472 cleanup_mm(&mm, &vmi); in test_vmi_prealloc_fail()
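test_vmi_prealloc_fail (lines 1415-1472) injects an allocation failure: a harness-global fail_prealloc knob makes the iterator's maple-tree preallocation report -ENOMEM mid-merge, and the test checks the operation backs out without corrupting the existing VMAs (the comment at line 1448 confirms cleanup_mm() also resets the knob). The injection mechanism is assumed to be a macro shadowing the real call before vma.c is included:

	static bool fail_prealloc;

	/* Assumed shim: fail on demand, otherwise behave normally. */
	#define vma_iter_prealloc(vmi, vma)				\
		(fail_prealloc ? -ENOMEM :				\
		 mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))

	/* In the test: arm the knob, attempt the merge, expect a clean
	 * refusal rather than a half-applied merge. */
	fail_prealloc = true;
	ASSERT_EQ(merge_existing(&vmg), NULL);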
1479 struct mm_struct mm = {}; in test_merge_extend() local
1480 VMA_ITERATOR(vmi, &mm, 0x1000); in test_merge_extend()
1483 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags); in test_merge_extend()
1484 alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags); in test_merge_extend()
1500 ASSERT_EQ(mm.map_count, 1); in test_merge_extend()
1502 cleanup_mm(&mm, &vmi); in test_merge_extend()
1509 struct mm_struct mm = {}; in test_copy_vma() local
1511 VMA_ITERATOR(vmi, &mm, 0); in test_copy_vma()
1516 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_copy_vma()
1524 cleanup_mm(&mm, &vmi); in test_copy_vma()
1528 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_copy_vma()
1529 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, flags); in test_copy_vma()
1534 cleanup_mm(&mm, &vmi); in test_copy_vma()
1541 struct mm_struct mm = {}; in test_expand_only_mode() local
1542 VMA_ITERATOR(vmi, &mm, 0); in test_expand_only_mode()
1544 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, flags, 5); in test_expand_only_mode()
1552 alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_expand_only_mode()
1559 vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_expand_only_mode()
1573 cleanup_mm(&mm, &vmi); in test_expand_only_mode()
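The last three small tests reuse the same machinery. test_merge_extend (lines 1479-1502) grows [0, 0x1000) by two pages so it meets [0x3000, 0x4000) and fuses with it, which is why map_count drops to 1 at line 1500; test_copy_vma (lines 1509-1534) checks copy_vma() both relocating into free space and merging into an adjacent target; test_expand_only_mode (lines 1541-1573) uses VMG_STATE() (line 1544) with an expand-only merge flag so prev is grown over the range without any other merge being attempted. A sketch of the extend case; vma_merge_extend() exists in mm/vma.c, though the delta semantics here are assumed:

	/* Extending [0, 0x1000) by 0x2000 makes it abut, and stay
	 * file-contiguous with, [0x3000, 0x4000): one VMA should result. */
	vma = vma_merge_extend(&vmi, vma, 0x2000);
	ASSERT_NE(vma, NULL);
	ASSERT_EQ(vma->vm_start, 0);
	ASSERT_EQ(vma->vm_end, 0x4000);
	ASSERT_EQ(mm.map_count, 1);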
1579 struct mm_struct mm = {}; in test_mmap_region_basic() local
1582 VMA_ITERATOR(vmi, &mm, 0); in test_mmap_region_basic()
1584 current->mm = &mm; in test_mmap_region_basic()
1610 ASSERT_EQ(mm.map_count, 2); in test_mmap_region_basic()
1624 cleanup_mm(&mm, &vmi); in test_mmap_region_basic()
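test_mmap_region_basic (lines 1579-1624) is the only test to go through the top-level mmap_region() path, which operates on current->mm, hence the assignment at line 1584; the assertion at line 1610 shows two mappings in place at that point. An illustrative shape of the calls, with all addresses and flag values assumed (mmap_region()'s kernel signature is file, addr, len, vm_flags, pgoff, uf):

	current->mm = &mm;	/* mmap_region() reads current->mm */

	/* Two disjoint anonymous mappings -> map_count == 2 (line 1610). */
	addr = mmap_region(NULL, 0x300000, 0x3000, flags, 0x300, NULL);
	ASSERT_EQ(addr, 0x300000);
	addr = mmap_region(NULL, 0x250000, 0x3000, flags, 0x250, NULL);
	ASSERT_EQ(addr, 0x250000);
	ASSERT_EQ(mm.map_count, 2);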