Lines Matching +defs:access +defs:offset
2213 u64 offset; member
2628 static void __kvm_synchronize_tsc(struct kvm_vcpu *vcpu, u64 offset, u64 tsc, in __kvm_synchronize_tsc()
2679 u64 offset, ns, elapsed; in kvm_synchronize_tsc() local
3121 unsigned int offset, in kvm_setup_guest_pvclock()
3443 u32 offset, last_msr; in set_msr_mce() local
4124 u32 offset, last_msr; in get_msr_mce() local
4237 u64 offset, ratio; in kvm_get_msr_common() local
4991 u64 offset = kvm_compute_l1_tsc_offset(vcpu, in kvm_arch_vcpu_load() local
5716 u64 offset, tsc, ns; in kvm_arch_tsc_set_attr() local
7538 gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u64 access, in translate_nested_gpa()
7558 u64 access = (kvm_x86_call(get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_read() local
7568 u64 access = (kvm_x86_call(get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_write() local
7584 struct kvm_vcpu *vcpu, u64 access, in kvm_read_guest_virt_helper()
7593 unsigned offset = addr & (PAGE_SIZE-1); in kvm_read_guest_virt_helper() local
7621 u64 access = (kvm_x86_call(get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_fetch_guest_virt() local
7622 unsigned offset; in kvm_fetch_guest_virt() local
7646 u64 access = (kvm_x86_call(get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_read_guest_virt() local
7665 u64 access = 0; in emulator_read_std() local
7676 struct kvm_vcpu *vcpu, u64 access, in kvm_write_guest_virt_helper()
7685 unsigned offset = addr & (PAGE_SIZE-1); in kvm_write_guest_virt_helper() local
7710 u64 access = PFERR_WRITE_MASK; in emulator_write_std() local
7786 u64 access = ((kvm_x86_call(get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0) in vcpu_mmio_gva_to_gpa() local
13373 unsigned int offset = offsetof(struct kvm_vcpu_pv_apf_data, token); in apf_put_user_ready() local
13381 unsigned int offset = offsetof(struct kvm_vcpu_pv_apf_data, token); in apf_pageready_slot_free() local
13675 u64 access = error_code & in kvm_fixup_and_inject_pf_error() local
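The offset hits in __kvm_synchronize_tsc(), kvm_synchronize_tsc(), kvm_arch_vcpu_load() and kvm_arch_tsc_set_attr() all revolve around the per-vCPU TSC offset: the guest observes the (scaled) host TSC plus an offset, so synchronizing the guest to a target TSC value means solving for that offset. Below is a minimal sketch of that relationship only, not KVM's implementation; it assumes a 1:1 scaling ratio instead of KVM's fixed-point kvm_scale_tsc(), and the helper names scaled_host_tsc() and compute_tsc_offset() are hypothetical.

    /*
     * Sketch: how a per-vCPU TSC offset relates to the TSC value the guest
     * should observe.  Real hardware computes guest_tsc = scale(host_tsc) + offset,
     * so the offset is chosen as target_tsc - scale(host_tsc).
     */
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the scaled host TSC read (assumes ratio 1:1). */
    static uint64_t scaled_host_tsc(uint64_t host_tsc)
    {
        return host_tsc;
    }

    /* Solve for the offset that makes the guest see target_tsc right now. */
    static uint64_t compute_tsc_offset(uint64_t host_tsc, uint64_t target_tsc)
    {
        return target_tsc - scaled_host_tsc(host_tsc);
    }

    int main(void)
    {
        uint64_t host_tsc = 1000000;   /* value a hypothetical TSC read returned */
        uint64_t target_tsc = 5000;    /* TSC the guest is supposed to observe   */
        uint64_t offset = compute_tsc_offset(host_tsc, target_tsc);

        /* Unsigned wrap-around is intentional: the offset may be "negative". */
        printf("offset = %#llx, guest sees %llu\n",
               (unsigned long long)offset,
               (unsigned long long)(scaled_host_tsc(host_tsc) + offset));
        return 0;
    }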
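The access hits (kvm_mmu_gva_to_gpa_read/write(), kvm_fetch_guest_virt(), kvm_read_guest_virt(), emulator_write_std(), vcpu_mmio_gva_to_gpa(), kvm_fixup_and_inject_pf_error()) share one pattern: build a page-fault-error-code style mask describing the emulated access, with the user bit taken from the current CPL and a write or instruction-fetch bit ORed in, and pass it to the gva-to-gpa walker. The sketch below mirrors that construction; the PF_ERR_* constants are defined locally to mirror the architectural #PF error-code bits and are not KVM's PFERR_* macros, and build_access_mask() is a hypothetical helper.

    #include <stdint.h>

    /* Local mirrors of the architectural #PF error-code bits (not KVM's headers). */
    #define PF_ERR_WRITE  (1ull << 1)   /* access was a write                */
    #define PF_ERR_USER   (1ull << 2)   /* access came from user mode (CPL3) */
    #define PF_ERR_FETCH  (1ull << 4)   /* access was an instruction fetch   */

    enum access_kind { ACCESS_READ, ACCESS_WRITE, ACCESS_FETCH };

    /*
     * Build the access mask the same way the listed helpers do: start from
     * the CPL (user bit iff CPL == 3), then OR in the bit describing the
     * kind of access being emulated.
     */
    static uint64_t build_access_mask(unsigned int cpl, enum access_kind kind)
    {
        uint64_t access = (cpl == 3) ? PF_ERR_USER : 0;

        if (kind == ACCESS_WRITE)
            access |= PF_ERR_WRITE;
        else if (kind == ACCESS_FETCH)
            access |= PF_ERR_FETCH;

        return access;
    }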
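kvm_read_guest_virt_helper() and kvm_write_guest_virt_helper() both derive offset = addr & (PAGE_SIZE-1) because a guest-virtual buffer can straddle page boundaries and each page must be translated and copied separately. Below is a standalone sketch of that chunking loop; copy_page_chunk() and read_guest_virt() are hypothetical stand-ins for the per-page translate-and-copy step and the kernel helpers.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define PAGE_SIZE 4096ull

    /* Hypothetical per-page step: translate 'gva' and copy 'len' bytes into 'dst'. */
    static int copy_page_chunk(uint64_t gva, void *dst, size_t len)
    {
        (void)gva;              /* a real implementation would translate this gva */
        memset(dst, 0, len);    /* placeholder for the actual copy */
        return 0;
    }

    /*
     * Chunk a guest-virtual read so no single copy crosses a page boundary,
     * mirroring the offset/"toread" logic in the helpers listed above.
     */
    static int read_guest_virt(uint64_t addr, void *val, size_t bytes)
    {
        uint8_t *data = val;

        while (bytes) {
            size_t offset = addr & (PAGE_SIZE - 1);
            size_t toread = bytes < PAGE_SIZE - offset ? bytes
                                                       : PAGE_SIZE - offset;
            int ret = copy_page_chunk(addr, data, toread);

            if (ret)
                return ret;

            bytes -= toread;
            data  += toread;
            addr  += toread;
        }
        return 0;
    }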
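apf_put_user_ready() and apf_pageready_slot_free() compute offsetof(struct kvm_vcpu_pv_apf_data, token) so they can touch just the token field of the shared async-page-fault area rather than the whole structure. The sketch below shows only that offsetof technique; struct pv_apf_data is a simplified stand-in (not the real UAPI layout), and put_token() plus the plain byte buffer are hypothetical replacements for the guest-mapped cache accessors.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Simplified stand-in, NOT the real struct kvm_vcpu_pv_apf_data layout. */
    struct pv_apf_data {
        uint32_t flags;
        uint32_t token;
    };

    /*
     * Write only the 'token' field at its byte offset inside a shared buffer,
     * the way the apf_* helpers above update a single field of the
     * guest-visible structure.
     */
    static void put_token(uint8_t *shared_area, uint32_t token)
    {
        size_t offset = offsetof(struct pv_apf_data, token);

        memcpy(shared_area + offset, &token, sizeof(token));
    }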