Lines Matching full:vgpu (drivers/gpu/drm/i915/gvt/handlers.c, the i915 GVT-g virtual MMIO handlers)
96 static void read_vreg(struct intel_vgpu *vgpu, unsigned int offset, in read_vreg() argument
99 memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes); in read_vreg()
102 static void write_vreg(struct intel_vgpu *vgpu, unsigned int offset, in write_vreg() argument
105 memcpy(&vgpu_vreg(vgpu, offset), p_data, bytes); in write_vreg()
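
These two helpers are the foundation of every handler below: they memcpy() between the caller's buffer and the vGPU's shadow register file, so 1-, 2-, 4- and 8-byte guest accesses all take the same path. A minimal standalone sketch of the pattern (toy_* names are illustrative; the real vgpu_vreg() macro indexes the vGPU's vreg storage by MMIO offset):

#include <stdint.h>
#include <string.h>

/* Toy model: the vGPU shadows its whole MMIO space in a byte array. */
struct toy_vgpu {
        uint8_t vreg[2 * 1024 * 1024];
};

static void toy_read_vreg(struct toy_vgpu *vgpu, unsigned int offset,
                          void *p_data, unsigned int bytes)
{
        memcpy(p_data, &vgpu->vreg[offset], bytes);  /* guest MMIO read */
}

static void toy_write_vreg(struct toy_vgpu *vgpu, unsigned int offset,
                           void *p_data, unsigned int bytes)
{
        memcpy(&vgpu->vreg[offset], p_data, bytes);  /* guest MMIO write */
}
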
182 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason) in enter_failsafe_mode() argument
197 pr_err("Now vgpu %d will enter failsafe mode.\n", vgpu->id); in enter_failsafe_mode()
198 vgpu->failsafe = true; in enter_failsafe_mode()
201 static int sanitize_fence_mmio_access(struct intel_vgpu *vgpu, in sanitize_fence_mmio_access() argument
204 unsigned int max_fence = vgpu_fence_sz(vgpu); in sanitize_fence_mmio_access()
212 * and we will let vgpu enter failsafe mode. in sanitize_fence_mmio_access()
214 if (!vgpu->pv_notified) in sanitize_fence_mmio_access()
215 enter_failsafe_mode(vgpu, in sanitize_fence_mmio_access()
224 static int gamw_echo_dev_rw_ia_write(struct intel_vgpu *vgpu, in gamw_echo_dev_rw_ia_write() argument
229 if (GRAPHICS_VER(vgpu->gvt->gt->i915) <= 10) { in gamw_echo_dev_rw_ia_write()
231 gvt_dbg_core("vgpu%d: ips enabled\n", vgpu->id); in gamw_echo_dev_rw_ia_write()
233 gvt_dbg_core("vgpu%d: ips disabled\n", vgpu->id); in gamw_echo_dev_rw_ia_write()
235 /* All engines must be enabled together for vGPU, in gamw_echo_dev_rw_ia_write()
245 write_vreg(vgpu, offset, p_data, bytes); in gamw_echo_dev_rw_ia_write()
249 static int fence_mmio_read(struct intel_vgpu *vgpu, unsigned int off, in fence_mmio_read() argument
254 ret = sanitize_fence_mmio_access(vgpu, offset_to_fence_num(off), in fence_mmio_read()
258 read_vreg(vgpu, off, p_data, bytes); in fence_mmio_read()
262 static int fence_mmio_write(struct intel_vgpu *vgpu, unsigned int off, in fence_mmio_write() argument
265 struct intel_gvt *gvt = vgpu->gvt; in fence_mmio_write()
269 ret = sanitize_fence_mmio_access(vgpu, fence_num, p_data, bytes); in fence_mmio_write()
272 write_vreg(vgpu, off, p_data, bytes); in fence_mmio_write()
275 intel_vgpu_write_fence(vgpu, fence_num, in fence_mmio_write()
276 vgpu_vreg64(vgpu, fence_num_to_offset(fence_num))); in fence_mmio_write()
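
Taken together, the fence handlers above implement a three-step write path: bound-check the fence index against this vGPU's allocation (dropping a non-paravirtualized guest into failsafe mode on an out-of-range access), update the 64-bit shadow value, then program the physical fence on the guest's behalf. A hedged sketch of that flow (toy_* names are illustrative, not the kernel API):

#include <stdint.h>
#include <stdio.h>

#define TOY_FENCE_NUM 4                 /* fences assigned to this vGPU */

static uint64_t toy_fence_vreg[TOY_FENCE_NUM];  /* 64-bit shadows */

static int toy_sanitize_fence(unsigned int fence_num)
{
        if (fence_num >= TOY_FENCE_NUM) {
                fprintf(stderr, "invalid fence %u, enter failsafe\n",
                        fence_num);
                return -1;              /* reject the access */
        }
        return 0;
}

static int toy_fence_write(unsigned int fence_num, uint64_t value)
{
        if (toy_sanitize_fence(fence_num))
                return -1;
        toy_fence_vreg[fence_num] = value;      /* update the shadow */
        /* ...then program the real fence from the shadow, as
         * intel_vgpu_write_fence() does in the handler above. */
        return 0;
}
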
286 static int mul_force_wake_write(struct intel_vgpu *vgpu, in mul_force_wake_write() argument
292 old = vgpu_vreg(vgpu, offset); in mul_force_wake_write()
295 if (GRAPHICS_VER(vgpu->gvt->gt->i915) >= 9) { in mul_force_wake_write()
315 vgpu_vreg(vgpu, offset) = new; in mul_force_wake_write()
316 vgpu_vreg(vgpu, ack_reg_offset) = (new & GENMASK(15, 0)); in mul_force_wake_write()
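
mul_force_wake_write() handles a multi-threaded forcewake register, which uses the common i915 masked-write convention: bits 31:16 of the written value select which of bits 15:0 actually change. Because no hardware will ever answer the guest, the handler also completes the handshake itself by copying the result into the ack register, as the last line above shows. A compact sketch of both steps (register names in the usage comment are assumptions for illustration):

#include <stdint.h>

/* Masked write: the high half of the value enables updates in the
 * low half. */
static uint32_t toy_masked_write(uint32_t old, uint32_t wr)
{
        uint32_t mask = wr >> 16;

        return (old & ~mask) | (wr & mask);
}

/* Usage against a toy shadow file:
 *   vreg[FW]     = toy_masked_write(vreg[FW], data);
 *   vreg[FW_ACK] = vreg[FW] & 0xffff;   // fake the hardware ack
 */
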
320 static int gdrst_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in gdrst_mmio_write() argument
326 write_vreg(vgpu, offset, p_data, bytes); in gdrst_mmio_write()
327 data = vgpu_vreg(vgpu, offset); in gdrst_mmio_write()
330 gvt_dbg_mmio("vgpu%d: request full GPU reset\n", vgpu->id); in gdrst_mmio_write()
334 gvt_dbg_mmio("vgpu%d: request RCS reset\n", vgpu->id); in gdrst_mmio_write()
338 gvt_dbg_mmio("vgpu%d: request VCS reset\n", vgpu->id); in gdrst_mmio_write()
342 gvt_dbg_mmio("vgpu%d: request BCS Reset\n", vgpu->id); in gdrst_mmio_write()
346 gvt_dbg_mmio("vgpu%d: request VECS Reset\n", vgpu->id); in gdrst_mmio_write()
350 gvt_dbg_mmio("vgpu%d: request VCS2 Reset\n", vgpu->id); in gdrst_mmio_write()
354 gvt_dbg_mmio("vgpu%d: request GUC Reset\n", vgpu->id); in gdrst_mmio_write()
355 vgpu_vreg_t(vgpu, GUC_STATUS) |= GS_MIA_IN_RESET; in gdrst_mmio_write()
357 engine_mask &= vgpu->gvt->gt->info.engine_mask; in gdrst_mmio_write()
361 intel_gvt_reset_vgpu_locked(vgpu, false, engine_mask); in gdrst_mmio_write()
364 vgpu_vreg(vgpu, offset) = 0; in gdrst_mmio_write()
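
gdrst_mmio_write() decodes the guest's GDRST request into an engine mask, clips it to the engines that actually exist on this GT, performs the virtual reset, and finally clears the register so the guest's completion poll succeeds. A standalone sketch of the decode step (the TOY_* bit assignments are illustrative, not the real GEN6_GRDOM_* encoding):

#include <stdint.h>

#define TOY_GRDOM_FULL          (1u << 0)
#define TOY_GRDOM_RENDER        (1u << 1)
#define TOY_GRDOM_MEDIA         (1u << 2)
#define TOY_GRDOM_BLT           (1u << 3)

#define TOY_ENGINE_RCS          (1u << 0)
#define TOY_ENGINE_VCS          (1u << 1)
#define TOY_ENGINE_BCS          (1u << 2)
#define TOY_ENGINE_ALL          0x7u

static uint32_t toy_gdrst_to_engine_mask(uint32_t data)
{
        uint32_t mask = 0;

        if (data & TOY_GRDOM_FULL)
                mask = TOY_ENGINE_ALL;          /* full GPU reset */
        if (data & TOY_GRDOM_RENDER)
                mask |= TOY_ENGINE_RCS;
        if (data & TOY_GRDOM_MEDIA)
                mask |= TOY_ENGINE_VCS;
        if (data & TOY_GRDOM_BLT)
                mask |= TOY_ENGINE_BCS;

        /* caller: mask &= available engines; reset; then vreg = 0 */
        return mask;
}
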
369 static int gmbus_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in gmbus_mmio_read() argument
372 return intel_gvt_i2c_handle_gmbus_read(vgpu, offset, p_data, bytes); in gmbus_mmio_read()
375 static int gmbus_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in gmbus_mmio_write() argument
378 return intel_gvt_i2c_handle_gmbus_write(vgpu, offset, p_data, bytes); in gmbus_mmio_write()
381 static int pch_pp_control_mmio_write(struct intel_vgpu *vgpu, in pch_pp_control_mmio_write() argument
384 write_vreg(vgpu, offset, p_data, bytes); in pch_pp_control_mmio_write()
386 if (vgpu_vreg(vgpu, offset) & PANEL_POWER_ON) { in pch_pp_control_mmio_write()
387 vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_ON; in pch_pp_control_mmio_write()
388 vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_SEQUENCE_STATE_ON_IDLE; in pch_pp_control_mmio_write()
389 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_SEQUENCE_POWER_DOWN; in pch_pp_control_mmio_write()
390 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_CYCLE_DELAY_ACTIVE; in pch_pp_control_mmio_write()
393 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= in pch_pp_control_mmio_write()
399 static int transconf_mmio_write(struct intel_vgpu *vgpu, in transconf_mmio_write() argument
402 write_vreg(vgpu, offset, p_data, bytes); in transconf_mmio_write()
404 if (vgpu_vreg(vgpu, offset) & TRANS_ENABLE) in transconf_mmio_write()
405 vgpu_vreg(vgpu, offset) |= TRANS_STATE_ENABLE; in transconf_mmio_write()
407 vgpu_vreg(vgpu, offset) &= ~TRANS_STATE_ENABLE; in transconf_mmio_write()
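
transconf_mmio_write() is the simplest instance of an idiom that recurs throughout the display handlers in this file (PCH_PP_CONTROL above, LCPLL_CTL just below, DBUF_CTL, the power-well controls): there is no physical pipe behind the vGPU, so the handler completes the hardware state machine instantly by reflecting the ENABLE request into the corresponding STATE/status bit of the shadow register. The idiom in generic form:

#include <stdint.h>

/* Reflect an enable request straight into its status bit so guest
 * polling loops terminate; bit parameters are caller-supplied. */
static uint32_t toy_mirror_status(uint32_t vreg, uint32_t enable_bit,
                                  uint32_t state_bit)
{
        if (vreg & enable_bit)
                vreg |= state_bit;
        else
                vreg &= ~state_bit;
        return vreg;
}
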
411 static int lcpll_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in lcpll_ctl_mmio_write() argument
414 write_vreg(vgpu, offset, p_data, bytes); in lcpll_ctl_mmio_write()
416 if (vgpu_vreg(vgpu, offset) & LCPLL_PLL_DISABLE) in lcpll_ctl_mmio_write()
417 vgpu_vreg(vgpu, offset) &= ~LCPLL_PLL_LOCK; in lcpll_ctl_mmio_write()
419 vgpu_vreg(vgpu, offset) |= LCPLL_PLL_LOCK; in lcpll_ctl_mmio_write()
421 if (vgpu_vreg(vgpu, offset) & LCPLL_CD_SOURCE_FCLK) in lcpll_ctl_mmio_write()
422 vgpu_vreg(vgpu, offset) |= LCPLL_CD_SOURCE_FCLK_DONE; in lcpll_ctl_mmio_write()
424 vgpu_vreg(vgpu, offset) &= ~LCPLL_CD_SOURCE_FCLK_DONE; in lcpll_ctl_mmio_write()
429 static int dpy_reg_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in dpy_reg_mmio_read() argument
437 vgpu_vreg(vgpu, offset) = 1 << 17; in dpy_reg_mmio_read()
440 vgpu_vreg(vgpu, offset) = 0x3; in dpy_reg_mmio_read()
443 vgpu_vreg(vgpu, offset) = 0x2f << 16; in dpy_reg_mmio_read()
449 read_vreg(vgpu, offset, p_data, bytes); in dpy_reg_mmio_read()
454 * Only PIPE_A is enabled in current vGPU display and PIPE_A is tied to
469 static u32 bdw_vgpu_get_dp_bitrate(struct intel_vgpu *vgpu, enum port port) in bdw_vgpu_get_dp_bitrate() argument
472 u32 ddi_pll_sel = vgpu_vreg_t(vgpu, PORT_CLK_SEL(port)); in bdw_vgpu_get_dp_bitrate()
486 switch (vgpu_vreg_t(vgpu, SPLL_CTL) & SPLL_FREQ_MASK) { in bdw_vgpu_get_dp_bitrate()
497 gvt_dbg_dpy("vgpu-%d PORT_%c can't get freq from SPLL 0x%08x\n", in bdw_vgpu_get_dp_bitrate()
498 vgpu->id, port_name(port), vgpu_vreg_t(vgpu, SPLL_CTL)); in bdw_vgpu_get_dp_bitrate()
510 wrpll_ctl = vgpu_vreg_t(vgpu, WRPLL_CTL(DPLL_ID_WRPLL1)); in bdw_vgpu_get_dp_bitrate()
512 wrpll_ctl = vgpu_vreg_t(vgpu, WRPLL_CTL(DPLL_ID_WRPLL2)); in bdw_vgpu_get_dp_bitrate()
516 refclk = vgpu->gvt->gt->i915->display.dpll.ref_clks.ssc; in bdw_vgpu_get_dp_bitrate()
522 gvt_dbg_dpy("vgpu-%d PORT_%c WRPLL can't get refclk 0x%08x\n", in bdw_vgpu_get_dp_bitrate()
523 vgpu->id, port_name(port), wrpll_ctl); in bdw_vgpu_get_dp_bitrate()
535 gvt_dbg_dpy("vgpu-%d PORT_%c has invalid clock select 0x%08x\n", in bdw_vgpu_get_dp_bitrate()
536 vgpu->id, port_name(port), vgpu_vreg_t(vgpu, PORT_CLK_SEL(port))); in bdw_vgpu_get_dp_bitrate()
544 static u32 bxt_vgpu_get_dp_bitrate(struct intel_vgpu *vgpu, enum port port) in bxt_vgpu_get_dp_bitrate() argument
547 int refclk = vgpu->gvt->gt->i915->display.dpll.ref_clks.nssc; in bxt_vgpu_get_dp_bitrate()
568 gvt_dbg_dpy("vgpu-%d no PHY for PORT_%c\n", vgpu->id, port_name(port)); in bxt_vgpu_get_dp_bitrate()
572 temp = vgpu_vreg_t(vgpu, BXT_PORT_PLL_ENABLE(port)); in bxt_vgpu_get_dp_bitrate()
574 gvt_dbg_dpy("vgpu-%d PORT_%c PLL_ENABLE 0x%08x isn't enabled or locked\n", in bxt_vgpu_get_dp_bitrate()
575 vgpu->id, port_name(port), temp); in bxt_vgpu_get_dp_bitrate()
581 vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 0))) << 22; in bxt_vgpu_get_dp_bitrate()
582 if (vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 3)) & PORT_PLL_M2_FRAC_ENABLE) in bxt_vgpu_get_dp_bitrate()
584 vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 2))); in bxt_vgpu_get_dp_bitrate()
586 vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 1))); in bxt_vgpu_get_dp_bitrate()
588 vgpu_vreg_t(vgpu, BXT_PORT_PLL_EBB_0(phy, ch))); in bxt_vgpu_get_dp_bitrate()
590 vgpu_vreg_t(vgpu, BXT_PORT_PLL_EBB_0(phy, ch))); in bxt_vgpu_get_dp_bitrate()
595 gvt_dbg_dpy("vgpu-%d PORT_%c PLL has invalid divider\n", vgpu->id, port_name(port)); in bxt_vgpu_get_dp_bitrate()
608 static u32 skl_vgpu_get_dp_bitrate(struct intel_vgpu *vgpu, enum port port) in skl_vgpu_get_dp_bitrate() argument
614 if (!(vgpu_vreg_t(vgpu, DPLL_CTRL2) & DPLL_CTRL2_DDI_CLK_OFF(port)) && in skl_vgpu_get_dp_bitrate()
615 (vgpu_vreg_t(vgpu, DPLL_CTRL2) & DPLL_CTRL2_DDI_SEL_OVERRIDE(port))) { in skl_vgpu_get_dp_bitrate()
616 dpll_id += (vgpu_vreg_t(vgpu, DPLL_CTRL2) & in skl_vgpu_get_dp_bitrate()
620 gvt_dbg_dpy("vgpu-%d DPLL for PORT_%c isn't turned on\n", in skl_vgpu_get_dp_bitrate()
621 vgpu->id, port_name(port)); in skl_vgpu_get_dp_bitrate()
626 switch ((vgpu_vreg_t(vgpu, DPLL_CTRL1) & in skl_vgpu_get_dp_bitrate()
649 gvt_dbg_dpy("vgpu-%d PORT_%c fail to get DPLL-%d freq\n", in skl_vgpu_get_dp_bitrate()
650 vgpu->id, port_name(port), dpll_id); in skl_vgpu_get_dp_bitrate()
656 static void vgpu_update_refresh_rate(struct intel_vgpu *vgpu) in vgpu_update_refresh_rate() argument
658 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in vgpu_update_refresh_rate()
664 port = (vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(display, TRANSCODER_A)) & in vgpu_update_refresh_rate()
667 gvt_dbg_dpy("vgpu-%d unsupported PORT_%c\n", vgpu->id, port_name(port)); in vgpu_update_refresh_rate()
673 dp_br = bdw_vgpu_get_dp_bitrate(vgpu, port); in vgpu_update_refresh_rate()
675 dp_br = bxt_vgpu_get_dp_bitrate(vgpu, port); in vgpu_update_refresh_rate()
677 dp_br = skl_vgpu_get_dp_bitrate(vgpu, port); in vgpu_update_refresh_rate()
680 link_m = vgpu_vreg_t(vgpu, PIPE_LINK_M1(display, TRANSCODER_A)); in vgpu_update_refresh_rate()
681 link_n = vgpu_vreg_t(vgpu, PIPE_LINK_N1(display, TRANSCODER_A)); in vgpu_update_refresh_rate()
684 htotal = (vgpu_vreg_t(vgpu, TRANS_HTOTAL(display, TRANSCODER_A)) >> TRANS_HTOTAL_SHIFT); in vgpu_update_refresh_rate()
685 vtotal = (vgpu_vreg_t(vgpu, TRANS_VTOTAL(display, TRANSCODER_A)) >> TRANS_VTOTAL_SHIFT); in vgpu_update_refresh_rate()
690 u32 *old_rate = &(intel_vgpu_port(vgpu, vgpu->display.port_num)->vrefresh_k); in vgpu_update_refresh_rate()
702 gvt_dbg_dpy("vgpu-%d PIPE_%c refresh rate updated to %d\n", in vgpu_update_refresh_rate()
703 vgpu->id, pipe_name(PIPE_A), new_rate); in vgpu_update_refresh_rate()
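
vgpu_update_refresh_rate() recovers the guest's intended refresh rate entirely from shadowed registers: the DP bit rate from the PLL readers above, the link M/N ratio, and the H/V totals (stored in hardware as total minus one). In outline, pixel_clk = dp_br * link_m / link_n and refresh = pixel_clk / ((htotal + 1) * (vtotal + 1)). A hedged sketch with 64-bit intermediates (the kernel additionally keeps the rate in millihertz; that scaling is omitted here):

#include <stdint.h>

static uint32_t toy_vrefresh(uint32_t dp_br, uint32_t link_m,
                             uint32_t link_n, uint32_t htotal,
                             uint32_t vtotal)
{
        uint64_t pixel_clk;

        if (!link_n)
                return 0;               /* link not programmed yet */
        pixel_clk = (uint64_t)dp_br * link_m / link_n;
        /* register fields hold total - 1, hence the "+ 1" */
        return (uint32_t)(pixel_clk /
                          ((uint64_t)(htotal + 1) * (vtotal + 1)));
}
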
707 static int pipeconf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pipeconf_mmio_write() argument
712 write_vreg(vgpu, offset, p_data, bytes); in pipeconf_mmio_write()
713 data = vgpu_vreg(vgpu, offset); in pipeconf_mmio_write()
716 vgpu_vreg(vgpu, offset) |= TRANSCONF_STATE_ENABLE; in pipeconf_mmio_write()
717 vgpu_update_refresh_rate(vgpu); in pipeconf_mmio_write()
718 vgpu_update_vblank_emulation(vgpu, true); in pipeconf_mmio_write()
720 vgpu_vreg(vgpu, offset) &= ~TRANSCONF_STATE_ENABLE; in pipeconf_mmio_write()
721 vgpu_update_vblank_emulation(vgpu, false); in pipeconf_mmio_write()
780 static int force_nonpriv_write(struct intel_vgpu *vgpu, in force_nonpriv_write() argument
785 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in force_nonpriv_write()
788 gvt_err("vgpu(%d) Invalid FORCE_NONPRIV offset %x(%dB)\n", in force_nonpriv_write()
789 vgpu->id, offset, bytes); in force_nonpriv_write()
795 gvt_err("vgpu(%d) Invalid FORCE_NONPRIV write %x at offset %x\n", in force_nonpriv_write()
796 vgpu->id, reg_nonpriv, offset); in force_nonpriv_write()
798 intel_vgpu_default_mmio_write(vgpu, offset, p_data, bytes); in force_nonpriv_write()
803 static int ddi_buf_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in ddi_buf_ctl_mmio_write() argument
806 write_vreg(vgpu, offset, p_data, bytes); in ddi_buf_ctl_mmio_write()
808 if (vgpu_vreg(vgpu, offset) & DDI_BUF_CTL_ENABLE) { in ddi_buf_ctl_mmio_write()
809 vgpu_vreg(vgpu, offset) &= ~DDI_BUF_IS_IDLE; in ddi_buf_ctl_mmio_write()
811 vgpu_vreg(vgpu, offset) |= DDI_BUF_IS_IDLE; in ddi_buf_ctl_mmio_write()
813 vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E)) in ddi_buf_ctl_mmio_write()
819 static int fdi_rx_iir_mmio_write(struct intel_vgpu *vgpu, in fdi_rx_iir_mmio_write() argument
822 vgpu_vreg(vgpu, offset) &= ~*(u32 *)p_data; in fdi_rx_iir_mmio_write()
829 static int fdi_auto_training_started(struct intel_vgpu *vgpu) in fdi_auto_training_started() argument
831 u32 ddi_buf_ctl = vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_E)); in fdi_auto_training_started()
832 u32 rx_ctl = vgpu_vreg(vgpu, _FDI_RXA_CTL); in fdi_auto_training_started()
833 u32 tx_ctl = vgpu_vreg_t(vgpu, DP_TP_CTL(PORT_E)); in fdi_auto_training_started()
845 static int check_fdi_rx_train_status(struct intel_vgpu *vgpu, in check_fdi_rx_train_status() argument
874 if (vgpu_vreg_t(vgpu, fdi_rx_imr) & fdi_iir_check_bits) in check_fdi_rx_train_status()
877 if (((vgpu_vreg_t(vgpu, fdi_tx_ctl) & fdi_tx_check_bits) in check_fdi_rx_train_status()
879 && ((vgpu_vreg_t(vgpu, fdi_rx_ctl) & fdi_rx_check_bits) in check_fdi_rx_train_status()
911 static int update_fdi_rx_iir_status(struct intel_vgpu *vgpu, in update_fdi_rx_iir_status() argument
929 write_vreg(vgpu, offset, p_data, bytes); in update_fdi_rx_iir_status()
933 ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN1); in update_fdi_rx_iir_status()
937 vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_BIT_LOCK; in update_fdi_rx_iir_status()
939 ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN2); in update_fdi_rx_iir_status()
943 vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_SYMBOL_LOCK; in update_fdi_rx_iir_status()
946 if (fdi_auto_training_started(vgpu)) in update_fdi_rx_iir_status()
947 vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E)) |= in update_fdi_rx_iir_status()
955 static int dp_tp_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in dp_tp_ctl_mmio_write() argument
962 write_vreg(vgpu, offset, p_data, bytes); in dp_tp_ctl_mmio_write()
965 data = (vgpu_vreg(vgpu, offset) & GENMASK(10, 8)) >> 8; in dp_tp_ctl_mmio_write()
968 vgpu_vreg_t(vgpu, status_reg) |= (1 << 25); in dp_tp_ctl_mmio_write()
973 static int dp_tp_status_mmio_write(struct intel_vgpu *vgpu, in dp_tp_status_mmio_write() argument
982 vgpu_vreg(vgpu, offset) = (reg_val & ~sticky_mask) | in dp_tp_status_mmio_write()
983 (vgpu_vreg(vgpu, offset) & sticky_mask); in dp_tp_status_mmio_write()
984 vgpu_vreg(vgpu, offset) &= ~(reg_val & sticky_mask); in dp_tp_status_mmio_write()
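
dp_tp_status_mmio_write() models write-1-to-clear ("sticky") status bits: writing 1 to a sticky bit clears it, writing 0 leaves it untouched, while the non-sticky bits take the written value directly. The two statements above do exactly that split; condensed into one helper:

#include <stdint.h>

static uint32_t toy_sticky_write(uint32_t old, uint32_t wr,
                                 uint32_t sticky_mask)
{
        /* non-sticky bits take the new value, sticky bits keep the old */
        uint32_t v = (wr & ~sticky_mask) | (old & sticky_mask);

        /* then any 1 written to a sticky bit clears it */
        return v & ~(wr & sticky_mask);
}
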
988 static int pch_adpa_mmio_write(struct intel_vgpu *vgpu, in pch_adpa_mmio_write() argument
993 write_vreg(vgpu, offset, p_data, bytes); in pch_adpa_mmio_write()
994 data = vgpu_vreg(vgpu, offset); in pch_adpa_mmio_write()
997 vgpu_vreg(vgpu, offset) &= ~ADPA_CRT_HOTPLUG_FORCE_TRIGGER; in pch_adpa_mmio_write()
1001 static int south_chicken2_mmio_write(struct intel_vgpu *vgpu, in south_chicken2_mmio_write() argument
1006 write_vreg(vgpu, offset, p_data, bytes); in south_chicken2_mmio_write()
1007 data = vgpu_vreg(vgpu, offset); in south_chicken2_mmio_write()
1010 vgpu_vreg(vgpu, offset) |= FDI_MPHY_IOSFSB_RESET_STATUS; in south_chicken2_mmio_write()
1012 vgpu_vreg(vgpu, offset) &= ~FDI_MPHY_IOSFSB_RESET_STATUS; in south_chicken2_mmio_write()
1019 static int pri_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pri_surf_mmio_write() argument
1022 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in pri_surf_mmio_write()
1027 write_vreg(vgpu, offset, p_data, bytes); in pri_surf_mmio_write()
1028 vgpu_vreg_t(vgpu, DSPSURFLIVE(display, pipe)) = vgpu_vreg(vgpu, offset); in pri_surf_mmio_write()
1030 vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(display, pipe))++; in pri_surf_mmio_write()
1032 if (vgpu_vreg_t(vgpu, DSPCNTR(display, pipe)) & PLANE_CTL_ASYNC_FLIP) in pri_surf_mmio_write()
1033 intel_vgpu_trigger_virtual_event(vgpu, event); in pri_surf_mmio_write()
1035 set_bit(event, vgpu->irq.flip_done_event[pipe]); in pri_surf_mmio_write()
1043 static int spr_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in spr_surf_mmio_write() argument
1049 write_vreg(vgpu, offset, p_data, bytes); in spr_surf_mmio_write()
1050 vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in spr_surf_mmio_write()
1052 if (vgpu_vreg_t(vgpu, SPRCTL(pipe)) & PLANE_CTL_ASYNC_FLIP) in spr_surf_mmio_write()
1053 intel_vgpu_trigger_virtual_event(vgpu, event); in spr_surf_mmio_write()
1055 set_bit(event, vgpu->irq.flip_done_event[pipe]); in spr_surf_mmio_write()
1060 static int reg50080_mmio_write(struct intel_vgpu *vgpu, in reg50080_mmio_write() argument
1064 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in reg50080_mmio_write()
1070 write_vreg(vgpu, offset, p_data, bytes); in reg50080_mmio_write()
1072 vgpu_vreg_t(vgpu, DSPSURFLIVE(display, pipe)) = vgpu_vreg(vgpu, offset); in reg50080_mmio_write()
1073 vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(display, pipe))++; in reg50080_mmio_write()
1075 vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in reg50080_mmio_write()
1078 if ((vgpu_vreg(vgpu, offset) & REG50080_FLIP_TYPE_MASK) == REG50080_FLIP_TYPE_ASYNC) in reg50080_mmio_write()
1079 intel_vgpu_trigger_virtual_event(vgpu, event); in reg50080_mmio_write()
1081 set_bit(event, vgpu->irq.flip_done_event[pipe]); in reg50080_mmio_write()
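
The three flip handlers (pri_surf, spr_surf, reg50080) share one policy: latch the new surface address into the *SURFLIVE shadow, bump the flip counter, then either inject the flip-done event immediately for an async flip or park it in irq.flip_done_event[pipe] so it is delivered on the next emulated vblank. The decision in miniature (toy_* names are illustrative):

#include <stdbool.h>

enum toy_event { TOY_FLIP_DONE_PIPE_A, TOY_EVENT_MAX };

static bool toy_flip_pending[TOY_EVENT_MAX];    /* deferred to vblank */

static void toy_trigger_event(enum toy_event e)
{
        (void)e;        /* inject the virtual interrupt here */
}

static void toy_complete_flip(enum toy_event e, bool async_flip)
{
        if (async_flip)
                toy_trigger_event(e);           /* finish immediately */
        else
                toy_flip_pending[e] = true;     /* next virtual vblank */
}
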
1086 static int trigger_aux_channel_interrupt(struct intel_vgpu *vgpu, in trigger_aux_channel_interrupt() argument
1089 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in trigger_aux_channel_interrupt()
1108 intel_vgpu_trigger_virtual_event(vgpu, event); in trigger_aux_channel_interrupt()
1112 static int dp_aux_ch_ctl_trans_done(struct intel_vgpu *vgpu, u32 value, in dp_aux_ch_ctl_trans_done() argument
1128 vgpu_vreg(vgpu, reg) = value; in dp_aux_ch_ctl_trans_done()
1131 return trigger_aux_channel_interrupt(vgpu, reg); in dp_aux_ch_ctl_trans_done()
1176 static int dp_aux_ch_ctl_mmio_write(struct intel_vgpu *vgpu, in dp_aux_ch_ctl_mmio_write() argument
1179 struct intel_vgpu_display *display = &vgpu->display; in dp_aux_ch_ctl_mmio_write()
1191 write_vreg(vgpu, offset, p_data, bytes); in dp_aux_ch_ctl_mmio_write()
1192 data = vgpu_vreg(vgpu, offset); in dp_aux_ch_ctl_mmio_write()
1194 if (GRAPHICS_VER(vgpu->gvt->gt->i915) >= 9 && in dp_aux_ch_ctl_mmio_write()
1198 } else if (IS_BROADWELL(vgpu->gvt->gt->i915) && in dp_aux_ch_ctl_mmio_write()
1208 vgpu_vreg(vgpu, offset) = 0; in dp_aux_ch_ctl_mmio_write()
1216 msg = vgpu_vreg(vgpu, offset + 4); in dp_aux_ch_ctl_mmio_write()
1236 vgpu_vreg(vgpu, offset + 4) = AUX_NATIVE_REPLY_NAK; in dp_aux_ch_ctl_mmio_write()
1237 dp_aux_ch_ctl_trans_done(vgpu, data, offset, 2, true); in dp_aux_ch_ctl_mmio_write()
1253 u32 r = vgpu_vreg(vgpu, offset + 8 + t * 4); in dp_aux_ch_ctl_mmio_write()
1275 vgpu_vreg(vgpu, offset + 4) = 0; in dp_aux_ch_ctl_mmio_write()
1276 dp_aux_ch_ctl_trans_done(vgpu, data, offset, 1, in dp_aux_ch_ctl_mmio_write()
1294 vgpu_vreg(vgpu, offset + 4) = 0; in dp_aux_ch_ctl_mmio_write()
1295 vgpu_vreg(vgpu, offset + 8) = 0; in dp_aux_ch_ctl_mmio_write()
1296 vgpu_vreg(vgpu, offset + 12) = 0; in dp_aux_ch_ctl_mmio_write()
1297 vgpu_vreg(vgpu, offset + 16) = 0; in dp_aux_ch_ctl_mmio_write()
1298 vgpu_vreg(vgpu, offset + 20) = 0; in dp_aux_ch_ctl_mmio_write()
1300 dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2, in dp_aux_ch_ctl_mmio_write()
1307 vgpu_vreg(vgpu, offset + 4 * idx) = 0; in dp_aux_ch_ctl_mmio_write()
1329 vgpu_vreg(vgpu, offset + in dp_aux_ch_ctl_mmio_write()
1335 dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2, in dp_aux_ch_ctl_mmio_write()
1341 intel_gvt_i2c_handle_aux_ch_write(vgpu, port_index, offset, p_data); in dp_aux_ch_ctl_mmio_write()
1344 trigger_aux_channel_interrupt(vgpu, offset); in dp_aux_ch_ctl_mmio_write()
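
dp_aux_ch_ctl_mmio_write() works on an AUX channel register block laid out as one control dword followed by five data dwords at offset + 4 through offset + 20, as the indexing above shows: the request is parsed out of the first data register, the DPCD access is emulated in software, the reply bytes are written back into the same data registers, and the done interrupt is raised via dp_aux_ch_ctl_trans_done(). A toy view of the block and the NAK path (the reply encoding here is an assumption for illustration):

#include <stdint.h>

struct toy_aux_ch {
        uint32_t ctl;           /* offset + 0 */
        uint32_t data[5];       /* offset + 4 .. offset + 20 */
};

static void toy_aux_nak(struct toy_aux_ch *ch)
{
        for (int i = 0; i < 5; i++)
                ch->data[i] = 0;        /* scrub stale reply bytes */
        ch->data[0] = 0x1u << 28;       /* illustrative NAK reply code */
        /* ...store the message size into ctl and raise the AUX-done
         * interrupt, as dp_aux_ch_ctl_trans_done() does above. */
}
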
1348 static int mbctl_write(struct intel_vgpu *vgpu, unsigned int offset, in mbctl_write() argument
1352 write_vreg(vgpu, offset, p_data, bytes); in mbctl_write()
1356 static int vga_control_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in vga_control_mmio_write() argument
1361 write_vreg(vgpu, offset, p_data, bytes); in vga_control_mmio_write()
1362 vga_disable = vgpu_vreg(vgpu, offset) & VGA_DISP_DISABLE; in vga_control_mmio_write()
1364 gvt_dbg_core("vgpu%d: %s VGA mode\n", vgpu->id, in vga_control_mmio_write()
1369 static u32 read_virtual_sbi_register(struct intel_vgpu *vgpu, in read_virtual_sbi_register() argument
1372 struct intel_vgpu_display *display = &vgpu->display; in read_virtual_sbi_register()
1386 static void write_virtual_sbi_register(struct intel_vgpu *vgpu, in write_virtual_sbi_register() argument
1389 struct intel_vgpu_display *display = &vgpu->display; in write_virtual_sbi_register()
1410 static int sbi_data_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in sbi_data_mmio_read() argument
1413 if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >> in sbi_data_mmio_read()
1415 unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) & in sbi_data_mmio_read()
1417 vgpu_vreg(vgpu, offset) = read_virtual_sbi_register(vgpu, in sbi_data_mmio_read()
1420 read_vreg(vgpu, offset, p_data, bytes); in sbi_data_mmio_read()
1424 static int sbi_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in sbi_ctl_mmio_write() argument
1429 write_vreg(vgpu, offset, p_data, bytes); in sbi_ctl_mmio_write()
1430 data = vgpu_vreg(vgpu, offset); in sbi_ctl_mmio_write()
1438 vgpu_vreg(vgpu, offset) = data; in sbi_ctl_mmio_write()
1440 if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >> in sbi_ctl_mmio_write()
1442 unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) & in sbi_ctl_mmio_write()
1445 write_virtual_sbi_register(vgpu, sbi_offset, in sbi_ctl_mmio_write()
1446 vgpu_vreg_t(vgpu, SBI_DATA)); in sbi_ctl_mmio_write()
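
The SBI (sideband interface) emulation keeps a small per-display table of (offset, value) pairs: a read opcode in SBI_CTL_STAT makes sbi_data_mmio_read() return the stored value for the address held in SBI_ADDR, and a write opcode stores SBI_DATA there. A toy version of that table (TOY_SBI_REG_NUM is an assumed capacity, not the kernel's):

#include <stdint.h>

#define TOY_SBI_REG_NUM 20

struct toy_sbi_reg {
        uint32_t offset;
        uint32_t value;
};

static struct toy_sbi_reg toy_sbi[TOY_SBI_REG_NUM];
static unsigned int toy_sbi_used;

static uint32_t toy_sbi_read(uint32_t offset)
{
        for (unsigned int i = 0; i < toy_sbi_used; i++)
                if (toy_sbi[i].offset == offset)
                        return toy_sbi[i].value;
        return 0;       /* unknown sideband addresses read as zero */
}

static void toy_sbi_write(uint32_t offset, uint32_t value)
{
        for (unsigned int i = 0; i < toy_sbi_used; i++) {
                if (toy_sbi[i].offset == offset) {
                        toy_sbi[i].value = value;
                        return;
                }
        }
        if (toy_sbi_used < TOY_SBI_REG_NUM) {
                toy_sbi[toy_sbi_used].offset = offset;
                toy_sbi[toy_sbi_used].value = value;
                toy_sbi_used++;
        }
}
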
1454 static int pvinfo_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in pvinfo_mmio_read() argument
1459 read_vreg(vgpu, offset, p_data, bytes); in pvinfo_mmio_read()
1482 vgpu->pv_notified = true; in pvinfo_mmio_read()
1486 static int handle_g2v_notification(struct intel_vgpu *vgpu, int notification) in handle_g2v_notification() argument
1492 pdps = (u64 *)&vgpu_vreg64_t(vgpu, vgtif_reg(pdp[0])); in handle_g2v_notification()
1499 mm = intel_vgpu_get_ppgtt_mm(vgpu, root_entry_type, pdps); in handle_g2v_notification()
1503 return intel_vgpu_put_ppgtt_mm(vgpu, pdps); in handle_g2v_notification()
1514 static int send_display_ready_uevent(struct intel_vgpu *vgpu, int ready) in send_display_ready_uevent() argument
1516 struct kobject *kobj = &vgpu->gvt->gt->i915->drm.primary->kdev->kobj; in send_display_ready_uevent()
1524 snprintf(vmid_str, 20, "VMID=%d", vgpu->id); in send_display_ready_uevent()
1530 static int pvinfo_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pvinfo_mmio_write() argument
1538 send_display_ready_uevent(vgpu, data ? 1 : 0); in pvinfo_mmio_write()
1541 handle_g2v_notification(vgpu, data); in pvinfo_mmio_write()
1559 enter_failsafe_mode(vgpu, GVT_FAILSAFE_INSUFFICIENT_RESOURCE); in pvinfo_mmio_write()
1569 write_vreg(vgpu, offset, p_data, bytes); in pvinfo_mmio_write()
1574 static int pf_write(struct intel_vgpu *vgpu, in pf_write() argument
1577 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in pf_write()
1585 vgpu->id); in pf_write()
1589 return intel_vgpu_default_mmio_write(vgpu, offset, p_data, bytes); in pf_write()
1592 static int power_well_ctl_mmio_write(struct intel_vgpu *vgpu, in power_well_ctl_mmio_write() argument
1595 write_vreg(vgpu, offset, p_data, bytes); in power_well_ctl_mmio_write()
1597 if (vgpu_vreg(vgpu, offset) & in power_well_ctl_mmio_write()
1599 vgpu_vreg(vgpu, offset) |= in power_well_ctl_mmio_write()
1602 vgpu_vreg(vgpu, offset) &= in power_well_ctl_mmio_write()
1607 static int gen9_dbuf_ctl_mmio_write(struct intel_vgpu *vgpu, in gen9_dbuf_ctl_mmio_write() argument
1610 write_vreg(vgpu, offset, p_data, bytes); in gen9_dbuf_ctl_mmio_write()
1612 if (vgpu_vreg(vgpu, offset) & DBUF_POWER_REQUEST) in gen9_dbuf_ctl_mmio_write()
1613 vgpu_vreg(vgpu, offset) |= DBUF_POWER_STATE; in gen9_dbuf_ctl_mmio_write()
1615 vgpu_vreg(vgpu, offset) &= ~DBUF_POWER_STATE; in gen9_dbuf_ctl_mmio_write()
1620 static int fpga_dbg_mmio_write(struct intel_vgpu *vgpu, in fpga_dbg_mmio_write() argument
1623 write_vreg(vgpu, offset, p_data, bytes); in fpga_dbg_mmio_write()
1625 if (vgpu_vreg(vgpu, offset) & FPGA_DBG_RM_NOCLAIM) in fpga_dbg_mmio_write()
1626 vgpu_vreg(vgpu, offset) &= ~FPGA_DBG_RM_NOCLAIM; in fpga_dbg_mmio_write()
1630 static int dma_ctrl_write(struct intel_vgpu *vgpu, unsigned int offset, in dma_ctrl_write() argument
1633 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in dma_ctrl_write()
1636 write_vreg(vgpu, offset, p_data, bytes); in dma_ctrl_write()
1637 mode = vgpu_vreg(vgpu, offset); in dma_ctrl_write()
1642 vgpu->id); in dma_ctrl_write()
1649 static int gen9_trtte_write(struct intel_vgpu *vgpu, unsigned int offset, in gen9_trtte_write() argument
1652 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gen9_trtte_write()
1658 vgpu->id); in gen9_trtte_write()
1661 write_vreg(vgpu, offset, p_data, bytes); in gen9_trtte_write()
1666 static int gen9_trtt_chicken_write(struct intel_vgpu *vgpu, unsigned int offset, in gen9_trtt_chicken_write() argument
1669 write_vreg(vgpu, offset, p_data, bytes); in gen9_trtt_chicken_write()
1673 static int dpll_status_read(struct intel_vgpu *vgpu, unsigned int offset, in dpll_status_read() argument
1678 if (vgpu_vreg(vgpu, 0x46010) & (1 << 31)) in dpll_status_read()
1681 if (vgpu_vreg(vgpu, 0x46014) & (1 << 31)) in dpll_status_read()
1684 if (vgpu_vreg(vgpu, 0x46040) & (1 << 31)) in dpll_status_read()
1687 if (vgpu_vreg(vgpu, 0x46060) & (1 << 31)) in dpll_status_read()
1690 vgpu_vreg(vgpu, offset) = v; in dpll_status_read()
1692 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in dpll_status_read()
1695 static int mailbox_write(struct intel_vgpu *vgpu, unsigned int offset, in mailbox_write() argument
1700 u32 *data0 = &vgpu_vreg_t(vgpu, GEN6_PCODE_DATA); in mailbox_write()
1704 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1705 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1706 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1707 IS_COMETLAKE(vgpu->gvt->gt->i915)) { in mailbox_write()
1717 } else if (IS_BROXTON(vgpu->gvt->gt->i915)) { in mailbox_write()
1730 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1731 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1732 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1733 IS_COMETLAKE(vgpu->gvt->gt->i915)) in mailbox_write()
1742 vgpu->id, value, *data0); in mailbox_write()
1750 return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes); in mailbox_write()
1753 static int hws_pga_write(struct intel_vgpu *vgpu, unsigned int offset, in hws_pga_write() argument
1758 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in hws_pga_write()
1761 !intel_gvt_ggtt_validate_range(vgpu, value, I915_GTT_PAGE_SIZE)) { in hws_pga_write()
1777 vgpu->hws_pga[engine->id] = value; in hws_pga_write()
1779 vgpu->id, value, offset); in hws_pga_write()
1781 return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes); in hws_pga_write()
1784 static int skl_power_well_ctl_write(struct intel_vgpu *vgpu, in skl_power_well_ctl_write() argument
1789 if (IS_BROXTON(vgpu->gvt->gt->i915)) in skl_power_well_ctl_write()
1796 return intel_vgpu_default_mmio_write(vgpu, offset, &v, bytes); in skl_power_well_ctl_write()
1799 static int skl_lcpll_write(struct intel_vgpu *vgpu, unsigned int offset, in skl_lcpll_write() argument
1808 vgpu_vreg(vgpu, offset) = v; in skl_lcpll_write()
1813 static int bxt_de_pll_enable_write(struct intel_vgpu *vgpu, in bxt_de_pll_enable_write() argument
1821 vgpu_vreg(vgpu, offset) = v; in bxt_de_pll_enable_write()
1826 static int bxt_port_pll_enable_write(struct intel_vgpu *vgpu, in bxt_port_pll_enable_write() argument
1834 vgpu_vreg(vgpu, offset) = v; in bxt_port_pll_enable_write()
1839 static int bxt_phy_ctl_family_write(struct intel_vgpu *vgpu, in bxt_phy_ctl_family_write() argument
1847 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_A) = data; in bxt_phy_ctl_family_write()
1850 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_B) = data; in bxt_phy_ctl_family_write()
1851 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_C) = data; in bxt_phy_ctl_family_write()
1855 vgpu_vreg(vgpu, offset) = v; in bxt_phy_ctl_family_write()
1860 static int bxt_port_tx_dw3_read(struct intel_vgpu *vgpu, in bxt_port_tx_dw3_read() argument
1863 u32 v = vgpu_vreg(vgpu, offset); in bxt_port_tx_dw3_read()
1867 vgpu_vreg(vgpu, offset) = v; in bxt_port_tx_dw3_read()
1869 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in bxt_port_tx_dw3_read()
1872 static int bxt_pcs_dw12_grp_write(struct intel_vgpu *vgpu, in bxt_pcs_dw12_grp_write() argument
1878 vgpu_vreg(vgpu, offset - 0x600) = v; in bxt_pcs_dw12_grp_write()
1879 vgpu_vreg(vgpu, offset - 0x800) = v; in bxt_pcs_dw12_grp_write()
1881 vgpu_vreg(vgpu, offset - 0x400) = v; in bxt_pcs_dw12_grp_write()
1882 vgpu_vreg(vgpu, offset - 0x600) = v; in bxt_pcs_dw12_grp_write()
1885 vgpu_vreg(vgpu, offset) = v; in bxt_pcs_dw12_grp_write()
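
bxt_pcs_dw12_grp_write() models Broxton's "group" PHY registers: a write to the group offset must also land in the per-lane copies, which live at the fixed negative offsets (0x400/0x600/0x800) seen above, with the pair chosen by channel. The broadcast in general form:

#include <stdint.h>

/* Mirror a group-register write into each lane's copy, then store
 * the group register itself; lane_offs carries deltas such as
 * -0x600/-0x800 taken from the handler above. */
static void toy_grp_write(uint8_t *vreg, uint32_t grp_off, uint32_t v,
                          const int32_t *lane_offs, int n_lanes)
{
        for (int i = 0; i < n_lanes; i++)
                *(uint32_t *)(vreg + grp_off + lane_offs[i]) = v;
        *(uint32_t *)(vreg + grp_off) = v;
}
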
1890 static int bxt_gt_disp_pwron_write(struct intel_vgpu *vgpu, in bxt_gt_disp_pwron_write() argument
1896 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) &= in bxt_gt_disp_pwron_write()
1898 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) |= in bxt_gt_disp_pwron_write()
1903 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) &= in bxt_gt_disp_pwron_write()
1905 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) |= in bxt_gt_disp_pwron_write()
1910 vgpu_vreg(vgpu, offset) = v; in bxt_gt_disp_pwron_write()
1915 static int edp_psr_imr_iir_write(struct intel_vgpu *vgpu, in edp_psr_imr_iir_write() argument
1918 vgpu_vreg(vgpu, offset) = 0; in edp_psr_imr_iir_write()
1932 static int bxt_ppat_low_write(struct intel_vgpu *vgpu, unsigned int offset, in bxt_ppat_low_write() argument
1945 vgpu_vreg(vgpu, offset) = lower_32_bits(pat); in bxt_ppat_low_write()
1950 static int guc_status_read(struct intel_vgpu *vgpu, in guc_status_read() argument
1955 read_vreg(vgpu, offset, p_data, bytes); in guc_status_read()
1956 vgpu_vreg(vgpu, offset) &= ~GS_MIA_IN_RESET; in guc_status_read()
1960 static int mmio_read_from_hw(struct intel_vgpu *vgpu, in mmio_read_from_hw() argument
1963 struct intel_gvt *gvt = vgpu->gvt; in mmio_read_from_hw()
1975 vgpu == gvt->scheduler.engine_owner[engine->id] || in mmio_read_from_hw()
1979 vgpu_vreg(vgpu, offset) = in mmio_read_from_hw()
1984 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in mmio_read_from_hw()
1987 static int elsp_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in elsp_mmio_write() argument
1990 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in elsp_mmio_write()
1991 const struct intel_engine_cs *engine = intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in elsp_mmio_write()
2001 * vGPU reset, it's set on D0->D3 on PCI config write, and cleared after in elsp_mmio_write()
2002 * vGPU reset if in resuming. in elsp_mmio_write()
2004 * S3 resume, but no vGPU reset (triggered by QEMU device model). After in elsp_mmio_write()
2006 * remains set which will break next vGPU reset logic (miss the expected in elsp_mmio_write()
2011 if (vgpu->d3_entered) in elsp_mmio_write()
2012 vgpu->d3_entered = false; in elsp_mmio_write()
2014 execlist = &vgpu->submission.execlist[engine->id]; in elsp_mmio_write()
2018 ret = intel_vgpu_submit_execlist(vgpu, engine); in elsp_mmio_write()
2029 static int ring_mode_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in ring_mode_mmio_write() argument
2034 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in ring_mode_mmio_write()
2039 if (IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
2040 IS_COMETLAKE(vgpu->gvt->gt->i915)) in ring_mode_mmio_write()
2042 write_vreg(vgpu, offset, p_data, bytes); in ring_mode_mmio_write()
2045 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
2049 if ((IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
2050 IS_COMETLAKE(vgpu->gvt->gt->i915)) && in ring_mode_mmio_write()
2052 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
2062 !vgpu->pv_notified) { in ring_mode_mmio_write()
2063 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
2077 ret = intel_vgpu_select_submission_ops(vgpu, in ring_mode_mmio_write()
2083 intel_vgpu_start_schedule(vgpu); in ring_mode_mmio_write()
2088 static int gvt_reg_tlb_control_handler(struct intel_vgpu *vgpu, in gvt_reg_tlb_control_handler() argument
2093 write_vreg(vgpu, offset, p_data, bytes); in gvt_reg_tlb_control_handler()
2094 vgpu_vreg(vgpu, offset) = 0; in gvt_reg_tlb_control_handler()
2115 set_bit(id, (void *)vgpu->submission.tlb_handle_pending); in gvt_reg_tlb_control_handler()
2120 static int ring_reset_ctl_write(struct intel_vgpu *vgpu, in ring_reset_ctl_write() argument
2125 write_vreg(vgpu, offset, p_data, bytes); in ring_reset_ctl_write()
2126 data = vgpu_vreg(vgpu, offset); in ring_reset_ctl_write()
2133 vgpu_vreg(vgpu, offset) = data; in ring_reset_ctl_write()
2137 static int csfe_chicken1_mmio_write(struct intel_vgpu *vgpu, in csfe_chicken1_mmio_write() argument
2144 write_vreg(vgpu, offset, p_data, bytes); in csfe_chicken1_mmio_write()
2148 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in csfe_chicken1_mmio_write()
3065 * @vgpu: a vGPU
3073 int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_default_mmio_read() argument
3076 read_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_default_mmio_read()
3082 * @vgpu: a vGPU
3090 int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_default_mmio_write() argument
3093 write_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_default_mmio_write()
3099 * @vgpu: a vGPU
3107 int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_mask_mmio_write() argument
3112 old_vreg = vgpu_vreg(vgpu, offset); in intel_vgpu_mask_mmio_write()
3113 write_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_mask_mmio_write()
3114 mask = vgpu_vreg(vgpu, offset) >> 16; in intel_vgpu_mask_mmio_write()
3115 vgpu_vreg(vgpu, offset) = (old_vreg & ~mask) | in intel_vgpu_mask_mmio_write()
3116 (vgpu_vreg(vgpu, offset) & mask); in intel_vgpu_mask_mmio_write()
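
intel_vgpu_mask_mmio_write() applies the same upper-16-bit write-mask convention sketched earlier for forcewake, this time as the generic handler for all masked registers. A worked example of the semantics:

/* old shadow value          = 0x0000ffff
 * guest writes              = 0x00ff0000  (mask 0x00ff, data bits 0)
 * mask = written >> 16      = 0x00ff      -> only bits 7:0 may change
 * result = (old & ~mask) | (written & mask)
 *        = (0x0000ffff & 0xffffff00) | (0x00ff0000 & 0x000000ff)
 *        = 0x0000ff00                  (bits 7:0 cleared, rest kept)
 */
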
3140 * @vgpu: a vGPU
3149 int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_mmio_reg_rw() argument
3152 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_mmio_reg_rw()
3153 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_mmio_reg_rw()
3169 return func(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3183 return mmio_info->read(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3190 old_vreg = vgpu_vreg(vgpu, offset); in intel_vgpu_mmio_reg_rw()
3194 ret = mmio_info->write(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3202 data |= vgpu_vreg(vgpu, offset) & ro_mask; in intel_vgpu_mmio_reg_rw()
3203 ret = mmio_info->write(vgpu, offset, &data, bytes); in intel_vgpu_mmio_reg_rw()
3208 u32 mask = vgpu_vreg(vgpu, offset) >> 16; in intel_vgpu_mmio_reg_rw()
3210 vgpu_vreg(vgpu, offset) = (old_vreg & ~mask) in intel_vgpu_mmio_reg_rw()
3211 | (vgpu_vreg(vgpu, offset) & mask); in intel_vgpu_mmio_reg_rw()
3219 intel_vgpu_default_mmio_read(vgpu, offset, pdata, bytes) : in intel_vgpu_mmio_reg_rw()
3220 intel_vgpu_default_mmio_write(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
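
intel_vgpu_mmio_reg_rw() is the single entry point for all of the handlers above: it dispatches to the tracked register's read/write hook, falls back to the default vreg accessors for untracked offsets, and post-processes writes for read-only masks and 16-bit write masks. A condensed sketch of the dispatch (structure inferred from the matching lines above; error paths, locking, and the mask post-processing step are elided):

#include <stdbool.h>
#include <stdint.h>

typedef int (*toy_mmio_fn)(uint32_t off, void *p, unsigned int n);

struct toy_mmio_info {
        toy_mmio_fn read;
        toy_mmio_fn write;
        uint32_t ro_mask;       /* bits the guest may not modify */
};

static int toy_mmio_rw(const struct toy_mmio_info *info,
                       toy_mmio_fn def_read, toy_mmio_fn def_write,
                       uint32_t off, void *p, unsigned int n,
                       bool is_read, uint32_t cur_vreg)
{
        if (!info)      /* untracked: plain shadow access */
                return is_read ? def_read(off, p, n)
                               : def_write(off, p, n);
        if (is_read)
                return info->read(off, p, n);
        if (info->ro_mask) {    /* merge protected bits back in */
                uint32_t data = *(uint32_t *)p & ~info->ro_mask;

                data |= cur_vreg & info->ro_mask;
                return info->write(off, &data, n);
        }
        return info->write(off, p, n);
}
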
3225 struct intel_vgpu *vgpu; in intel_gvt_restore_fence() local
3228 idr_for_each_entry(&(gvt)->vgpu_idr, vgpu, id) { in intel_gvt_restore_fence()
3230 for (i = 0; i < vgpu_fence_sz(vgpu); i++) in intel_gvt_restore_fence()
3231 intel_vgpu_write_fence(vgpu, i, vgpu_vreg64(vgpu, fence_num_to_offset(i))); in intel_gvt_restore_fence()
3238 struct intel_vgpu *vgpu = data; in mmio_pm_restore_handler() local
3242 intel_uncore_write(&dev_priv->uncore, _MMIO(offset), vgpu_vreg(vgpu, offset)); in mmio_pm_restore_handler()
3249 struct intel_vgpu *vgpu; in intel_gvt_restore_mmio() local
3252 idr_for_each_entry(&(gvt)->vgpu_idr, vgpu, id) { in intel_gvt_restore_mmio()
3254 intel_gvt_for_each_tracked_mmio(gvt, mmio_pm_restore_handler, vgpu); in intel_gvt_restore_mmio()