Lines Matching +full:mm +full:- +full:0

1 // SPDX-License-Identifier: GPL-2.0-or-later
3 * address space "slices" (meta-segments) support
15 #include <linux/mm.h>
21 #include <linux/sched/mm.h>
39 (int)SLICE_NUM_LOW, &mask->low_slices); in slice_print_mask()
41 (int)SLICE_NUM_HIGH, mask->high_slices); in slice_print_mask()
44 #define slice_dbg(fmt...) do { if (_slice_debug) pr_devel(fmt); } while (0)
63 unsigned long end = start + len - 1; in slice_range_to_mask()
65 ret->low_slices = 0; in slice_range_to_mask()
67 bitmap_zero(ret->high_slices, SLICE_NUM_HIGH); in slice_range_to_mask()
71 (unsigned long)(SLICE_LOW_TOP - 1)); in slice_range_to_mask()
73 ret->low_slices = (1u << (GET_LOW_SLICE_INDEX(mend) + 1)) in slice_range_to_mask()
74 - (1u << GET_LOW_SLICE_INDEX(start)); in slice_range_to_mask()
80 unsigned long count = GET_HIGH_SLICE_INDEX(align_end) - start_index; in slice_range_to_mask()
82 bitmap_set(ret->high_slices, start_index, count); in slice_range_to_mask()
86 static int slice_area_is_free(struct mm_struct *mm, unsigned long addr, in slice_area_is_free() argument
91 if ((mm_ctx_slb_addr_limit(&mm->context) - len) < addr) in slice_area_is_free()
92 return 0; in slice_area_is_free()
93 vma = find_vma(mm, addr); in slice_area_is_free()
97 static int slice_low_has_vma(struct mm_struct *mm, unsigned long slice) in slice_low_has_vma() argument
99 return !slice_area_is_free(mm, slice << SLICE_LOW_SHIFT, in slice_low_has_vma()
103 static int slice_high_has_vma(struct mm_struct *mm, unsigned long slice) in slice_high_has_vma() argument
110 * at 4GB, not 0 */ in slice_high_has_vma()
111 if (start == 0) in slice_high_has_vma()
114 return !slice_area_is_free(mm, start, end - start); in slice_high_has_vma()
117 static void slice_mask_for_free(struct mm_struct *mm, struct slice_mask *ret, in slice_mask_for_free() argument
122 ret->low_slices = 0; in slice_mask_for_free()
124 bitmap_zero(ret->high_slices, SLICE_NUM_HIGH); in slice_mask_for_free()
126 for (i = 0; i < SLICE_NUM_LOW; i++) in slice_mask_for_free()
127 if (!slice_low_has_vma(mm, i)) in slice_mask_for_free()
128 ret->low_slices |= 1u << i; in slice_mask_for_free()
130 if (slice_addr_is_low(high_limit - 1)) in slice_mask_for_free()
133 for (i = 0; i < GET_HIGH_SLICE_INDEX(high_limit); i++) in slice_mask_for_free()
134 if (!slice_high_has_vma(mm, i)) in slice_mask_for_free()
135 __set_bit(i, ret->high_slices); in slice_mask_for_free()
138 static bool slice_check_range_fits(struct mm_struct *mm, in slice_check_range_fits() argument
142 unsigned long end = start + len - 1; in slice_check_range_fits()
143 u64 low_slices = 0; in slice_check_range_fits()
147 (unsigned long)(SLICE_LOW_TOP - 1)); in slice_check_range_fits()
150 - (1u << GET_LOW_SLICE_INDEX(start)); in slice_check_range_fits()
152 if ((low_slices & available->low_slices) != low_slices) in slice_check_range_fits()
158 unsigned long count = GET_HIGH_SLICE_INDEX(align_end) - start_index; in slice_check_range_fits()
162 if (!test_bit(i, available->high_slices)) in slice_check_range_fits()
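The two excerpts above (slice_range_to_mask() and slice_check_range_fits()) build the low-slice part of a mask with the same arithmetic: a contiguous run of 256MB slices becomes the bit range (1u << (last + 1)) - (1u << first), and the fit test is then a plain inclusion check against the available mask. A stand-alone user-space sketch of that arithmetic, not kernel code; the slice geometry constants mirror the 64-bit book3s values but are assumed here purely for illustration:

#include <stdint.h>
#include <stdio.h>

#define SLICE_LOW_SHIFT		28			/* 256MB low slices      */
#define SLICE_LOW_TOP		0x100000000ull		/* low slices end at 4GB */
#define GET_LOW_SLICE_INDEX(addr)	((unsigned int)((addr) >> SLICE_LOW_SHIFT))

/* Bits GET_LOW_SLICE_INDEX(start) .. GET_LOW_SLICE_INDEX(end), inclusive. */
static uint64_t low_slice_mask(uint64_t start, uint64_t len)
{
	uint64_t end = start + len - 1;
	uint64_t mend;

	if (start >= SLICE_LOW_TOP)
		return 0;
	mend = end < SLICE_LOW_TOP ? end : SLICE_LOW_TOP - 1;

	return (1u << (GET_LOW_SLICE_INDEX(mend) + 1))
	       - (1u << GET_LOW_SLICE_INDEX(start));
}

int main(void)
{
	/* 512MB starting at 256MB covers low slices 1 and 2 -> mask 0x6. */
	uint64_t mask = low_slice_mask(0x10000000ull, 0x20000000ull);
	uint64_t available = 0x000e;	/* slices 1-3 currently usable */

	printf("range mask 0x%llx\n", (unsigned long long)mask);
	printf("fits: %s\n", (mask & available) == mask ? "yes" : "no");
	return 0;
}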
173 struct mm_struct *mm = parm; in slice_flush_segments() local
176 if (mm != current->active_mm) in slice_flush_segments()
179 copy_mm_to_paca(current->active_mm); in slice_flush_segments()
187 static void slice_convert(struct mm_struct *mm, in slice_convert() argument
197 slice_dbg("slice_convert(mm=%p, psize=%d)\n", mm, psize); in slice_convert()
200 psize_mask = slice_mask_for_size(&mm->context, psize); in slice_convert()
203 * concurrent 64k -> 4k demotion ... in slice_convert()
207 lpsizes = mm_ctx_low_slices(&mm->context); in slice_convert()
208 for (i = 0; i < SLICE_NUM_LOW; i++) { in slice_convert()
209 if (!(mask->low_slices & (1u << i))) in slice_convert()
212 mask_index = i & 0x1; in slice_convert()
216 old_psize = (lpsizes[index] >> (mask_index * 4)) & 0xf; in slice_convert()
217 old_mask = slice_mask_for_size(&mm->context, old_psize); in slice_convert()
218 old_mask->low_slices &= ~(1u << i); in slice_convert()
219 psize_mask->low_slices |= 1u << i; in slice_convert()
222 lpsizes[index] = (lpsizes[index] & ~(0xf << (mask_index * 4))) | in slice_convert()
226 hpsizes = mm_ctx_high_slices(&mm->context); in slice_convert()
227 for (i = 0; i < GET_HIGH_SLICE_INDEX(mm_ctx_slb_addr_limit(&mm->context)); i++) { in slice_convert()
228 if (!test_bit(i, mask->high_slices)) in slice_convert()
231 mask_index = i & 0x1; in slice_convert()
235 old_psize = (hpsizes[index] >> (mask_index * 4)) & 0xf; in slice_convert()
236 old_mask = slice_mask_for_size(&mm->context, old_psize); in slice_convert()
237 __clear_bit(i, old_mask->high_slices); in slice_convert()
238 __set_bit(i, psize_mask->high_slices); in slice_convert()
241 hpsizes[index] = (hpsizes[index] & ~(0xf << (mask_index * 4))) | in slice_convert()
246 (unsigned long)mm_ctx_low_slices(&mm->context), in slice_convert()
247 (unsigned long)mm_ctx_high_slices(&mm->context)); in slice_convert()
251 copro_flush_all_slbs(mm); in slice_convert()
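slice_convert() above (and get_slice_psize() further down) keep one 4-bit page-size index per slice, two slices per byte, indexed by i >> 1 and selected by the low bit of i. A stand-alone sketch of that nibble packing, not kernel code; the array size and the get_psize()/set_psize() helpers are hypothetical:

#include <stdio.h>

static unsigned char psizes[8];		/* 16 slices, 4 bits each */

static unsigned int get_psize(unsigned long i)
{
	unsigned long index = i >> 1;		/* byte holding slice i */
	unsigned long mask_index = i & 0x1;	/* low or high nibble   */

	return (psizes[index] >> (mask_index * 4)) & 0xf;
}

static void set_psize(unsigned long i, unsigned int psize)
{
	unsigned long index = i >> 1;
	unsigned long mask_index = i & 0x1;

	/* Clear the slice's nibble, then write the new page-size index. */
	psizes[index] = (psizes[index] & ~(0xf << (mask_index * 4))) |
			(psize << (mask_index * 4));
}

int main(void)
{
	set_psize(5, 0x3);			/* arbitrary page-size index */
	printf("slice 5 psize %u\n", get_psize(5));
	printf("slice 4 psize %u\n", get_psize(4));	/* untouched -> 0 */
	return 0;
}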
269 return !!(available->low_slices & (1u << slice)); in slice_scan_available()
274 return !!test_bit(slice, available->high_slices); in slice_scan_available()
278 static unsigned long slice_find_area_bottomup(struct mm_struct *mm, in slice_find_area_bottomup() argument
287 .align_mask = PAGE_MASK & ((1ul << pshift) - 1), in slice_find_area_bottomup()
317 return -ENOMEM; in slice_find_area_bottomup()
320 static unsigned long slice_find_area_topdown(struct mm_struct *mm, in slice_find_area_topdown() argument
330 .align_mask = PAGE_MASK & ((1ul << pshift) - 1), in slice_find_area_topdown()
341 addr += mm_ctx_slb_addr_limit(&mm->context) - DEFAULT_MAP_WINDOW; in slice_find_area_topdown()
345 if (!slice_scan_available(addr - 1, available, 0, &addr)) in slice_find_area_topdown()
357 else if (slice_scan_available(addr - 1, available, 0, &prev)) { in slice_find_area_topdown()
370 * so fall back to the bottom-up function here. This scenario in slice_find_area_topdown()
374 return slice_find_area_bottomup(mm, TASK_UNMAPPED_BASE, len, available, psize, high_limit); in slice_find_area_topdown()
378 static unsigned long slice_find_area(struct mm_struct *mm, unsigned long len, in slice_find_area() argument
383 return slice_find_area_topdown(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
385 return slice_find_area_bottomup(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
391 dst->low_slices = src->low_slices; in slice_copy_mask()
394 bitmap_copy(dst->high_slices, src->high_slices, SLICE_NUM_HIGH); in slice_copy_mask()
401 dst->low_slices = src1->low_slices | src2->low_slices; in slice_or_mask()
404 bitmap_or(dst->high_slices, src1->high_slices, src2->high_slices, SLICE_NUM_HIGH); in slice_or_mask()
411 dst->low_slices = src1->low_slices & ~src2->low_slices; in slice_andnot_mask()
414 bitmap_andnot(dst->high_slices, src1->high_slices, src2->high_slices, SLICE_NUM_HIGH); in slice_andnot_mask()
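The helpers above treat a slice_mask as a plain set: a u64 of low slices plus a bitmap of high slices, combined with OR and AND-NOT. A stand-alone sketch of the same operations in user space, not kernel code, with the kernel bitmap API replaced by fixed-size loops and an assumed SLICE_NUM_HIGH of 512:

#include <stdint.h>
#include <stdio.h>

#define SLICE_NUM_HIGH	512	/* illustrative: 1TB slices */
#define HIGH_WORDS	(SLICE_NUM_HIGH / 64)

struct slice_mask {
	uint64_t low_slices;
	uint64_t high_slices[HIGH_WORDS];
};

static void slice_or_mask(struct slice_mask *dst, const struct slice_mask *src1,
			  const struct slice_mask *src2)
{
	int i;

	dst->low_slices = src1->low_slices | src2->low_slices;
	for (i = 0; i < HIGH_WORDS; i++)
		dst->high_slices[i] = src1->high_slices[i] | src2->high_slices[i];
}

static void slice_andnot_mask(struct slice_mask *dst, const struct slice_mask *src1,
			      const struct slice_mask *src2)
{
	int i;

	dst->low_slices = src1->low_slices & ~src2->low_slices;
	for (i = 0; i < HIGH_WORDS; i++)
		dst->high_slices[i] = src1->high_slices[i] & ~src2->high_slices[i];
}

int main(void)
{
	struct slice_mask good = { .low_slices = 0x0003 };
	struct slice_mask free_mask = { .low_slices = 0x00f0 };
	struct slice_mask potential, converted;

	slice_or_mask(&potential, &good, &free_mask);	  /* candidate slices      */
	slice_andnot_mask(&converted, &potential, &good); /* slices needing convert */
	printf("potential 0x%llx converted 0x%llx\n",
	       (unsigned long long)potential.low_slices,
	       (unsigned long long)converted.low_slices);
	return 0;
}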
434 struct mm_struct *mm = current->mm; in slice_get_unmapped_area() local
443 return -ENOMEM; in slice_get_unmapped_area()
444 if (len & (page_size - 1)) in slice_get_unmapped_area()
445 return -EINVAL; in slice_get_unmapped_area()
447 if (addr & (page_size - 1)) in slice_get_unmapped_area()
448 return -EINVAL; in slice_get_unmapped_area()
449 if (addr > high_limit - len) in slice_get_unmapped_area()
450 return -ENOMEM; in slice_get_unmapped_area()
453 if (high_limit > mm_ctx_slb_addr_limit(&mm->context)) { in slice_get_unmapped_area()
459 mm_ctx_set_slb_addr_limit(&mm->context, high_limit); in slice_get_unmapped_area()
461 on_each_cpu(slice_flush_segments, mm, 1); in slice_get_unmapped_area()
465 BUG_ON(mm->task_size == 0); in slice_get_unmapped_area()
466 BUG_ON(mm_ctx_slb_addr_limit(&mm->context) == 0); in slice_get_unmapped_area()
469 slice_dbg("slice_get_unmapped_area(mm=%p, psize=%d...\n", mm, psize); in slice_get_unmapped_area()
478 if (addr > high_limit - len || addr < mmap_min_addr || in slice_get_unmapped_area()
479 !slice_area_is_free(mm, addr, len)) in slice_get_unmapped_area()
480 addr = 0; in slice_get_unmapped_area()
486 maskp = slice_mask_for_size(&mm->context, psize); in slice_get_unmapped_area()
513 compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K); in slice_get_unmapped_area()
527 if (addr != 0 || fixed) { in slice_get_unmapped_area()
531 if (slice_check_range_fits(mm, &good_mask, addr, len)) { in slice_get_unmapped_area()
540 newaddr = slice_find_area(mm, len, &good_mask, in slice_get_unmapped_area()
542 if (newaddr != -ENOMEM) { in slice_get_unmapped_area()
546 slice_dbg(" found area at 0x%lx\n", newaddr); in slice_get_unmapped_area()
554 slice_mask_for_free(mm, &potential_mask, high_limit); in slice_get_unmapped_area()
558 if (addr != 0 || fixed) { in slice_get_unmapped_area()
559 if (slice_check_range_fits(mm, &potential_mask, addr, len)) { in slice_get_unmapped_area()
568 return -EBUSY; in slice_get_unmapped_area()
576 newaddr = slice_find_area(mm, len, &good_mask, in slice_get_unmapped_area()
578 if (newaddr != -ENOMEM) { in slice_get_unmapped_area()
579 slice_dbg(" found area at 0x%lx\n", newaddr); in slice_get_unmapped_area()
587 newaddr = slice_find_area(mm, len, &potential_mask, in slice_get_unmapped_area()
590 if (IS_ENABLED(CONFIG_PPC_64K_PAGES) && newaddr == -ENOMEM && in slice_get_unmapped_area()
592 /* retry the search with 4k-page slices included */ in slice_get_unmapped_area()
594 newaddr = slice_find_area(mm, len, &potential_mask, in slice_get_unmapped_area()
598 if (newaddr == -ENOMEM) in slice_get_unmapped_area()
599 return -ENOMEM; in slice_get_unmapped_area()
602 slice_dbg(" found potential area at 0x%lx\n", newaddr); in slice_get_unmapped_area()
610 if (need_extra_context(mm, newaddr)) { in slice_get_unmapped_area()
611 if (alloc_extended_context(mm, newaddr) < 0) in slice_get_unmapped_area()
612 return -ENOMEM; in slice_get_unmapped_area()
621 slice_convert(mm, &potential_mask, psize); in slice_get_unmapped_area()
623 on_each_cpu(slice_flush_segments, mm, 1); in slice_get_unmapped_area()
628 if (need_extra_context(mm, newaddr)) { in slice_get_unmapped_area()
629 if (alloc_extended_context(mm, newaddr) < 0) in slice_get_unmapped_area()
630 return -ENOMEM; in slice_get_unmapped_area()
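slice_get_unmapped_area() above searches in a fixed order: slices already using the wanted page size (the "good" mask) are tried first, and only then are free slices folded in to form the "potential" mask, whose newly used slices must later be converted. A stand-alone sketch of that ordering, not kernel code, reduced to 16 low slices with a toy find_area(); the address hint, MAP_FIXED handling, the 4K compat retry and the final slice_convert()/SLB flush are left out:

#include <stdint.h>
#include <stdio.h>

#define NO_ADDR	(~0ul)

/* Pretend each bit is one slice and "an address" is just a slice index. */
static unsigned long find_area(uint16_t mask)
{
	unsigned long i;

	for (i = 0; i < 16; i++)
		if (mask & (1u << i))
			return i;
	return NO_ADDR;
}

static unsigned long get_unmapped_slice(uint16_t good_mask, uint16_t free_mask)
{
	uint16_t potential_mask;
	unsigned long addr;

	/* 1) Prefer slices already using the wanted page size. */
	addr = find_area(good_mask);
	if (addr != NO_ADDR)
		return addr;

	/* 2) Otherwise consider free slices too; they will need converting. */
	potential_mask = good_mask | free_mask;
	return find_area(potential_mask);
}

int main(void)
{
	printf("slice %lu\n", get_unmapped_slice(0x0000, 0x00f0));	/* -> 4 */
	return 0;
}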
646 return 0; in file_to_psize()
665 psize = mm_ctx_user_psize(&current->mm->context); in arch_get_unmapped_area()
667 return slice_get_unmapped_area(addr, len, flags, psize, 0); in arch_get_unmapped_area()
685 psize = mm_ctx_user_psize(&current->mm->context); in arch_get_unmapped_area_topdown()
690 unsigned int notrace get_slice_psize(struct mm_struct *mm, unsigned long addr) in get_slice_psize() argument
698 psizes = mm_ctx_low_slices(&mm->context); in get_slice_psize()
701 psizes = mm_ctx_high_slices(&mm->context); in get_slice_psize()
704 mask_index = index & 0x1; in get_slice_psize()
705 return (psizes[index >> 1] >> (mask_index * 4)) & 0xf; in get_slice_psize()
709 void slice_init_new_context_exec(struct mm_struct *mm) in slice_init_new_context_exec() argument
715 slice_dbg("slice_init_new_context_exec(mm=%p)\n", mm); in slice_init_new_context_exec()
719 * case of fork it is just inherited from the mm being in slice_init_new_context_exec()
722 mm_ctx_set_slb_addr_limit(&mm->context, SLB_ADDR_LIMIT_DEFAULT); in slice_init_new_context_exec()
723 mm_ctx_set_user_psize(&mm->context, psize); in slice_init_new_context_exec()
728 lpsizes = mm_ctx_low_slices(&mm->context); in slice_init_new_context_exec()
731 hpsizes = mm_ctx_high_slices(&mm->context); in slice_init_new_context_exec()
737 mask = slice_mask_for_size(&mm->context, psize); in slice_init_new_context_exec()
738 mask->low_slices = ~0UL; in slice_init_new_context_exec()
740 bitmap_fill(mask->high_slices, SLICE_NUM_HIGH); in slice_init_new_context_exec()
745 struct mm_struct *mm = current->mm; in slice_setup_new_exec() local
747 slice_dbg("slice_setup_new_exec(mm=%p)\n", mm); in slice_setup_new_exec()
752 mm_ctx_set_slb_addr_limit(&mm->context, DEFAULT_MAP_WINDOW); in slice_setup_new_exec()
755 void slice_set_range_psize(struct mm_struct *mm, unsigned long start, in slice_set_range_psize() argument
763 slice_convert(mm, &mask, psize); in slice_set_range_psize()
782 * generic code will redefine that function as 0 in that. This is ok
786 int slice_is_hugepage_only_range(struct mm_struct *mm, unsigned long addr, in slice_is_hugepage_only_range() argument
790 unsigned int psize = mm_ctx_user_psize(&mm->context); in slice_is_hugepage_only_range()
794 maskp = slice_mask_for_size(&mm->context, psize); in slice_is_hugepage_only_range()
801 compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K); in slice_is_hugepage_only_range()
803 return !slice_check_range_fits(mm, &available, addr, len); in slice_is_hugepage_only_range()
806 return !slice_check_range_fits(mm, maskp, addr, len); in slice_is_hugepage_only_range()
815 return 1UL << mmu_psize_to_shift(get_slice_psize(vma->vm_mm, vma->vm_start)); in vma_mmu_pagesize()
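The last line above turns a slice's page-size index into bytes via mmu_psize_to_shift(). A stand-alone sketch of that lookup, not kernel code; the enum ordering and shift table below are illustrative rather than the kernel's actual definitions:

#include <stdio.h>

enum { PSIZE_4K, PSIZE_64K, PSIZE_16M };

static unsigned int psize_to_shift(unsigned int psize)
{
	static const unsigned int shift[] = { 12, 16, 24 };	/* 4K, 64K, 16M */

	return shift[psize];
}

int main(void)
{
	printf("%lu bytes\n", 1UL << psize_to_shift(PSIZE_64K));	/* 65536 */
	return 0;
}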