Lines Matching +full:higher +full:- +full:end
/* SPDX-License-Identifier: GPL-2.0-only */
#define KVM_MMU_LOCK_INIT(kvm) rwlock_init(&(kvm)->mmu_lock)
#define KVM_MMU_LOCK(kvm) write_lock(&(kvm)->mmu_lock)
#define KVM_MMU_UNLOCK(kvm) write_unlock(&(kvm)->mmu_lock)
#define KVM_MMU_LOCK_INIT(kvm) spin_lock_init(&(kvm)->mmu_lock)
#define KVM_MMU_LOCK(kvm) spin_lock(&(kvm)->mmu_lock)
#define KVM_MMU_UNLOCK(kvm) spin_unlock(&(kvm)->mmu_lock)
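The two macro groups above are alternative definitions of the same lock wrappers: one maps the MMU lock onto a rwlock taken for write, the other onto a spinlock, and the header selects between them at compile time (via a KVM_HAVE_MMU_RWLOCK-style guard on architectures that provide a rwlock-based MMU lock). Below is a minimal, runnable userspace analogue of that pattern, assuming nothing beyond POSIX threads; the struct vm and MMU_LOCK* names are illustrative, not KVM's.

/*
 * Userspace sketch of the KVM_MMU_LOCK abstraction: callers only ever use
 * MMU_LOCK()/MMU_UNLOCK(), and a compile-time switch decides whether the
 * underlying primitive is a rwlock (taken for write) or a plain mutex.
 * Build with: cc -pthread -DHAVE_MMU_RWLOCK demo.c   (or without the -D)
 */
#include <pthread.h>
#include <stdio.h>

struct vm {
#ifdef HAVE_MMU_RWLOCK
	pthread_rwlock_t mmu_lock;
#else
	pthread_mutex_t mmu_lock;
#endif
	unsigned long mappings;
};

#ifdef HAVE_MMU_RWLOCK
#define MMU_LOCK_INIT(vm)	pthread_rwlock_init(&(vm)->mmu_lock, NULL)
#define MMU_LOCK(vm)		pthread_rwlock_wrlock(&(vm)->mmu_lock)
#define MMU_UNLOCK(vm)		pthread_rwlock_unlock(&(vm)->mmu_lock)
#else
#define MMU_LOCK_INIT(vm)	pthread_mutex_init(&(vm)->mmu_lock, NULL)
#define MMU_LOCK(vm)		pthread_mutex_lock(&(vm)->mmu_lock)
#define MMU_UNLOCK(vm)		pthread_mutex_unlock(&(vm)->mmu_lock)
#endif

int main(void)
{
	struct vm vm = { .mappings = 0 };

	MMU_LOCK_INIT(&vm);

	/* Callers bracket "page table" updates identically regardless of
	 * which primitive was selected at compile time. */
	MMU_LOCK(&vm);
	vm.mappings++;
	MMU_UNLOCK(&vm);

	printf("mappings: %lu\n", vm.mappings);
	return 0;
}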
 * If non-NULL, try to get a writable mapping even for a read fault.
 * (e.g. tail pages of non-compound higher order allocations from
	unsigned long end);
	unsigned long end)	/* parameter of gfn_to_pfn_cache_invalidate_start() */
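The two "unsigned long end" hits look like the tails of a declaration and of an empty inline stub of gfn_to_pfn_cache_invalidate_start(). A hedged reconstruction of that idiom follows, assuming a (kvm, start, end) parameter list and a CONFIG_HAVE_KVM_PFNCACHE guard: when the pfn cache is compiled out, the empty static inline lets the caller compile unchanged while invalidation becomes a no-op.

struct kvm;	/* opaque here; the real definition lives in <linux/kvm_host.h> */

#ifdef CONFIG_HAVE_KVM_PFNCACHE
void gfn_to_pfn_cache_invalidate_start(struct kvm *kvm,
				       unsigned long start,
				       unsigned long end);
#else
/* No pfn cache configured: invalidating the given range is a no-op. */
static inline void gfn_to_pfn_cache_invalidate_start(struct kvm *kvm,
						     unsigned long start,
						     unsigned long end)
{
}
#endif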
	return -EIO;	/* in kvm_gmem_bind() */
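The -EIO hit appears to come from a fallback stub of kvm_gmem_bind() rather than the real binding path: when guest_memfd support is compiled out, binding a memslot to a guest_memfd fd should never be reachable, so the stub warns and fails. The header-context sketch below (it needs kernel headers, it is not standalone) reconstructs that shape; the CONFIG_KVM_PRIVATE_MEM guard name and exact parameters are assumptions based on the upstream header, not quoted from this listing.

#ifndef CONFIG_KVM_PRIVATE_MEM
/* guest_memfd not built in: reaching this bind path is a bug, so warn once
 * and report a hard failure instead of silently succeeding. */
static inline int kvm_gmem_bind(struct kvm *kvm, struct kvm_memory_slot *slot,
				unsigned int fd, loff_t offset)
{
	WARN_ON_ONCE(1);
	return -EIO;
}
#endif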