Lines matching the identifier "indirect" in drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c (AMD VCN 2.0 driver). The leading number on each match is its line in that file, "in ...()" names the enclosing function, and "argument" flags lines where "indirect" is a function parameter.

429 static void vcn_v2_0_mc_resume_dpg_mode(struct amdgpu_device *adev, bool indirect)  in vcn_v2_0_mc_resume_dpg_mode()  argument
436 if (!indirect) { in vcn_v2_0_mc_resume_dpg_mode()
439 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_lo), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
442 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_hi), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
444 UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
447 UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
449 UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
451 UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
457 lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
460 upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
464 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
467 if (!indirect) in vcn_v2_0_mc_resume_dpg_mode()
469 UVD, 0, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
472 UVD, 0, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
475 if (!indirect) { in vcn_v2_0_mc_resume_dpg_mode()
478 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
481 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
483 UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
486 UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
488 UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
490 UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
493 UVD, 0, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
498 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
501 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
503 UVD, 0, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
505 UVD, 0, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
510 lower_32_bits(adev->vcn.inst->fw_shared.gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
513 upper_32_bits(adev->vcn.inst->fw_shared.gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
515 UVD, 0, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
518 AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_fw_shared)), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
522 UVD, 0, mmUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
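Every match in this group forwards the same trailing argument pair, a mask-enable value and the indirect flag, into the driver's DPG write helper. The sketch below reconstructs that helper from memory (WREG32_SOC15_DPG_MODE in amdgpu_vcn.h; names and field layout should be verified against the tree): with indirect false the write lands in hardware immediately through the UVD_DPG_LMA_DATA/UVD_DPG_LMA_CTL pair, with indirect true it is merely appended as an (offset, value) pair to a CPU-side SRAM image that the PSP replays later.

    /* From-memory sketch of the amdgpu_vcn.h helper, not the verbatim macro.
     * Like the kernel macro, it relies on 'adev' being in scope at the call site.
     */
    #define WREG32_SOC15_DPG_MODE(inst_idx, offset, value, mask_en, indirect)       \
        do {                                                                        \
            if (!indirect) {                                                        \
                /* direct: write now through the DPG LMA data/ctl pair */           \
                WREG32_SOC15(VCN, inst_idx, mmUVD_DPG_LMA_DATA, value);             \
                WREG32_SOC15(VCN, inst_idx, mmUVD_DPG_LMA_CTL,                      \
                    (0x1 << UVD_DPG_LMA_CTL__READ_WRITE__SHIFT |                    \
                     mask_en << UVD_DPG_LMA_CTL__MASK_EN__SHIFT |                   \
                     offset << UVD_DPG_LMA_CTL__READ_WRITE_ADDR__SHIFT));           \
            } else {                                                                \
                /* indirect: append (offset, value) to the SRAM image only */       \
                *adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ = offset;            \
                *adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ = value;             \
            }                                                                       \
        } while (0)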
638 uint8_t sram_sel, uint8_t indirect) in vcn_v2_0_clock_gating_dpg_mode() argument
670 UVD, 0, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
674 UVD, 0, mmUVD_CGC_GATE), 0, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
678 UVD, 0, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
682 UVD, 0, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect); in vcn_v2_0_clock_gating_dpg_mode()
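In this clock-gating variant the caller-supplied sram_sel rides in the mask-enable slot of the same helper; reassembling the continuation line at 670 above, the call shape is roughly:

    /* sram_sel occupies the mask_en position of the write helper (inferred) */
    WREG32_SOC15_DPG_MODE(0, SOC15_DPG_MODE_OFFSET(
        UVD, 0, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect);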
837 static int vcn_v2_0_start_dpg_mode(struct amdgpu_device *adev, bool indirect) in vcn_v2_0_start_dpg_mode() argument
851 if (indirect) in vcn_v2_0_start_dpg_mode()
855 vcn_v2_0_clock_gating_dpg_mode(adev, 0, indirect); in vcn_v2_0_start_dpg_mode()
862 UVD, 0, mmUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v2_0_start_dpg_mode()
866 UVD, 0, mmUVD_MASTINT_EN), 0, 0, indirect); in vcn_v2_0_start_dpg_mode()
878 UVD, 0, mmUVD_LMI_CTRL), tmp, 0, indirect); in vcn_v2_0_start_dpg_mode()
882 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect); in vcn_v2_0_start_dpg_mode()
889 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect); in vcn_v2_0_start_dpg_mode()
896 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect); in vcn_v2_0_start_dpg_mode()
902 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect); in vcn_v2_0_start_dpg_mode()
904 vcn_v2_0_mc_resume_dpg_mode(adev, indirect); in vcn_v2_0_start_dpg_mode()
907 UVD, 0, mmUVD_REG_XX_MASK), 0x10, 0, indirect); in vcn_v2_0_start_dpg_mode()
909 UVD, 0, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect); in vcn_v2_0_start_dpg_mode()
913 UVD, 0, mmUVD_SOFT_RESET), 0, 0, indirect); in vcn_v2_0_start_dpg_mode()
918 0x1F << UVD_LMI_CTRL2__RE_OFLD_MIF_WR_REQ_NUM__SHIFT, 0, indirect); in vcn_v2_0_start_dpg_mode()
923 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v2_0_start_dpg_mode()
925 if (indirect) in vcn_v2_0_start_dpg_mode()
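The two bare "if (indirect)" checks at 851 and 925 bracket the whole start sequence: before the first register write the driver rewinds the SRAM cursor to the start of the DPG SRAM image, and after the last write it hands the accumulated list to the PSP to program on the driver's behalf. A from-memory sketch of that bracket (field names per the 2.0-era amdgpu_vcn.h; psp_update_vcn_sram is the real PSP entry point, but check its exact signature):

    if (indirect)   /* start batching: rewind the SRAM write cursor */
        adev->vcn.inst->dpg_sram_curr_addr =
            (uint32_t *)adev->vcn.inst->dpg_sram_cpu_addr;

    /* ... every WREG32_SOC15_DPG_MODE call above queues here when indirect ... */

    if (indirect)   /* flush: let the PSP replay the queued writes */
        psp_update_vcn_sram(adev, 0, adev->vcn.inst->dpg_sram_gpu_addr,
                (uint32_t)((uintptr_t)adev->vcn.inst->dpg_sram_curr_addr -
                           (uintptr_t)adev->vcn.inst->dpg_sram_cpu_addr));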
1506 * vcn_v2_0_dec_ring_emit_ib - execute indirect buffer
1510 * @ib: indirect buffer to execute
1513 * Write ring commands to execute the indirect buffer
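Here "indirect buffer" (IB) is the command-submission sense of the word, unrelated to the DPG flag above. A from-memory sketch of the decode-ring emit path (the internal.ib_* register handles are per-ASIC bookkeeping in struct amdgpu_vcn; verify against vcn_v2_0.c):

    static void vcn_v2_0_dec_ring_emit_ib(struct amdgpu_ring *ring,
                                          struct amdgpu_job *job,
                                          struct amdgpu_ib *ib,
                                          uint32_t flags)
    {
        struct amdgpu_device *adev = ring->adev;
        unsigned vmid = AMDGPU_JOB_GET_VMID(job);

        /* tell the VCPU which VM the IB's addresses live in */
        amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_vmid, 0));
        amdgpu_ring_write(ring, vmid);
        /* 64-bit GPU address of the indirect buffer */
        amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_bar_low, 0));
        amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
        amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_bar_high, 0));
        amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
        /* size in dwords; the VCPU fetches and executes from there */
        amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_size, 0));
        amdgpu_ring_write(ring, ib->length_dw);
    }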
1681 * vcn_v2_0_enc_ring_emit_ib - enc execute indirect buffer
1685 * @ib: indirect buffer to execute
1688 * Write enc ring commands to execute the indirect buffer
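The encode ring uses a plain command stream instead of PACKET0 register writes; a from-memory sketch of the usual shape (VCN_ENC_CMD_IB is the opcode constant from amdgpu_vcn.h):

    static void vcn_v2_0_enc_ring_emit_ib(struct amdgpu_ring *ring,
                                          struct amdgpu_job *job,
                                          struct amdgpu_ib *ib,
                                          uint32_t flags)
    {
        unsigned vmid = AMDGPU_JOB_GET_VMID(job);

        /* one IB packet: opcode, vmid, 64-bit address, length in dwords */
        amdgpu_ring_write(ring, VCN_ENC_CMD_IB);
        amdgpu_ring_write(ring, vmid);
        amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
        amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
        amdgpu_ring_write(ring, ib->length_dw);
    }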