Lines matching 0x0f000000 in drivers/gpu/drm/amd/amdgpu/vce_v4_0.c

45 #define VCE_STATUS_VCPU_REPORT_FW_LOADED_MASK	0x02
66 if (ring->me == 0) in vce_v4_0_ring_get_rptr()
67 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR)); in vce_v4_0_ring_get_rptr()
69 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2)); in vce_v4_0_ring_get_rptr()
71 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR3)); in vce_v4_0_ring_get_rptr()
88 if (ring->me == 0) in vce_v4_0_ring_get_wptr()
89 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR)); in vce_v4_0_ring_get_wptr()
91 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2)); in vce_v4_0_ring_get_wptr()
93 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3)); in vce_v4_0_ring_get_wptr()
114 if (ring->me == 0) in vce_v4_0_ring_set_wptr()
115 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR), in vce_v4_0_ring_set_wptr()
118 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2), in vce_v4_0_ring_set_wptr()
121 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3), in vce_v4_0_ring_set_wptr()
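The three accessors above share one dispatch pattern: VCE 4.0 exposes up to three rings on a single register block, and ring->me selects the plain, "2", or "3" variant of the RPTR/WPTR register. A condensed sketch of the read side, reconstructed from the matched lines (the `ring->me == 1` branch is implied rather than shown; the wptr getter and setter follow the same shape):

static uint64_t vce_v4_0_ring_get_rptr(struct amdgpu_ring *ring)
{
	struct amdgpu_device *adev = ring->adev;

	/* ring->me picks which of the three ring register sets to use */
	if (ring->me == 0)
		return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR));
	else if (ring->me == 1)
		return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2));
	else
		return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR3));
}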
129 for (i = 0; i < 10; ++i) { in vce_v4_0_firmware_loaded()
130 for (j = 0; j < 100; ++j) { in vce_v4_0_firmware_loaded()
132 RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS)); in vce_v4_0_firmware_loaded()
135 return 0; in vce_v4_0_firmware_loaded()
140 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), in vce_v4_0_firmware_loaded()
144 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), 0, in vce_v4_0_firmware_loaded()
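The firmware_loaded lines describe a two-level poll: the inner loop samples VCE_STATUS for the VCPU_REPORT_FW_LOADED bit (the 0x02 mask defined at line 45), and each outer iteration pulses ECPU_SOFT_RESET as a recovery nudge before retrying. A hedged reconstruction, with delays and error reporting condensed:

static int vce_v4_0_firmware_loaded(struct amdgpu_device *adev)
{
	int i, j;

	for (i = 0; i < 10; ++i) {
		for (j = 0; j < 100; ++j) {
			uint32_t status =
				RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS));

			if (status & VCE_STATUS_VCPU_REPORT_FW_LOADED_MASK)
				return 0;	/* VCPU reports firmware loaded */
			mdelay(10);
		}

		/* not responding: pulse the ECPU soft reset and retry */
		WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET),
			 VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK,
			 ~VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK);
		mdelay(10);
		WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), 0,
			 ~VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK);
		mdelay(10);
	}

	return -ETIMEDOUT;
}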
156 uint32_t data = 0, loop; in vce_v4_0_mmsch_start()
164 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_ADDR_LO), lower_32_bits(addr)); in vce_v4_0_mmsch_start()
165 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_ADDR_HI), upper_32_bits(addr)); in vce_v4_0_mmsch_start()
168 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_VMID)); in vce_v4_0_mmsch_start()
170 data |= (0 << VCE_MMSCH_VF_VMID__VF_CTX_VMID__SHIFT); /* use domain0 for MM scheduler */ in vce_v4_0_mmsch_start()
171 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_VMID), data); in vce_v4_0_mmsch_start()
174 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_SIZE), size); in vce_v4_0_mmsch_start()
177 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP), 0); in vce_v4_0_mmsch_start()
179 WDOORBELL32(adev->vce.ring[0].doorbell_index, 0); in vce_v4_0_mmsch_start()
180 *adev->vce.ring[0].wptr_cpu_addr = 0; in vce_v4_0_mmsch_start()
181 adev->vce.ring[0].wptr = 0; in vce_v4_0_mmsch_start()
182 adev->vce.ring[0].wptr_old = 0; in vce_v4_0_mmsch_start()
185 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_HOST), 0x10000001); in vce_v4_0_mmsch_start()
187 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP)); in vce_v4_0_mmsch_start()
189 while ((data & 0x10000002) != 0x10000002) { in vce_v4_0_mmsch_start()
191 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP)); in vce_v4_0_mmsch_start()
202 return 0; in vce_v4_0_mmsch_start()
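Taken together, the mmsch_start lines form a mailbox handshake with the MM scheduler (MMSCH): publish the init table's GPU address and size, pin the context to VMID 0, clear any stale response, zero ring 0's doorbell and write pointers, then write 0x10000001 to MAILBOX_HOST and spin on MAILBOX_RESP until the 0x10000002 ack appears. A condensed sketch; the parameter list is simplified for illustration (the driver passes the mm table object and derives the address and size from it), and the poll bound is illustrative:

static int vce_v4_0_mmsch_start(struct amdgpu_device *adev,
				uint64_t addr, uint32_t size)
{
	uint32_t data, loop = 1000;	/* poll bound: illustrative */

	/* publish the init table to the MMSCH */
	WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_ADDR_LO), lower_32_bits(addr));
	WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_ADDR_HI), upper_32_bits(addr));
	WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_SIZE), size);

	/* pin the context to VMID 0 (domain0 for the MM scheduler) */
	data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_VMID));
	data |= (0 << VCE_MMSCH_VF_VMID__VF_CTX_VMID__SHIFT);
	WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_VMID), data);

	WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP), 0);	/* clear stale ack */
	WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_HOST), 0x10000001);	/* kick MMSCH */

	data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP));
	while ((data & 0x10000002) != 0x10000002) {
		udelay(10);
		if (--loop == 0)
			return -EBUSY;	/* MMSCH never acked */
		data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP));
	}

	return 0;
}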
209 uint32_t table_size = 0; in vce_v4_0_sriov_start()
210 struct mmsch_v1_0_cmd_direct_write direct_wt = { { 0 } }; in vce_v4_0_sriov_start()
211 struct mmsch_v1_0_cmd_direct_read_modify_write direct_rd_mod_wt = { { 0 } }; in vce_v4_0_sriov_start()
212 struct mmsch_v1_0_cmd_direct_polling direct_poll = { { 0 } }; in vce_v4_0_sriov_start()
213 struct mmsch_v1_0_cmd_end end = { { 0 } }; in vce_v4_0_sriov_start()
222 if (header->vce_table_offset == 0 && header->vce_table_size == 0) { in vce_v4_0_sriov_start()
226 if (header->uvd_table_offset == 0 && header->uvd_table_size == 0) in vce_v4_0_sriov_start()
233 ring = &adev->vce.ring[0]; in vce_v4_0_sriov_start()
234 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), in vce_v4_0_sriov_start()
236 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI), in vce_v4_0_sriov_start()
238 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE), in vce_v4_0_sriov_start()
242 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL), 0x398000); in vce_v4_0_sriov_start()
243 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CACHE_CTRL), ~0x1, 0); in vce_v4_0_sriov_start()
244 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL), 0); in vce_v4_0_sriov_start()
245 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL1), 0); in vce_v4_0_sriov_start()
246 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VM_CTRL), 0); in vce_v4_0_sriov_start()
254 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
256 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
258 (tmr_mc_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
259 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), 0); in vce_v4_0_sriov_start()
261 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
264 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
266 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
267 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), in vce_v4_0_sriov_start()
268 offset & ~0x0f000000); in vce_v4_0_sriov_start()
271 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
274 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
276 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
277 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
280 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
282 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
285 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE0), size); in vce_v4_0_sriov_start()
287 offset = (adev->firmware.load_type != AMDGPU_FW_LOAD_PSP) ? offset + size : 0; in vce_v4_0_sriov_start()
289 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET1), in vce_v4_0_sriov_start()
290 (offset & ~0x0f000000) | (1 << 24)); in vce_v4_0_sriov_start()
291 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE1), size); in vce_v4_0_sriov_start()
295 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET2), in vce_v4_0_sriov_start()
296 (offset & ~0x0f000000) | (2 << 24)); in vce_v4_0_sriov_start()
297 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE2), size); in vce_v4_0_sriov_start()
299 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL2), ~0x100, 0); in vce_v4_0_sriov_start()
300 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_SYS_INT_EN), in vce_v4_0_sriov_start()
305 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), in vce_v4_0_sriov_start()
307 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CNTL), in vce_v4_0_sriov_start()
308 ~0x200001, VCE_VCPU_CNTL__CLK_EN_MASK); in vce_v4_0_sriov_start()
309 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), in vce_v4_0_sriov_start()
310 ~VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK, 0); in vce_v4_0_sriov_start()
312 MMSCH_V1_0_INSERT_DIRECT_POLL(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), in vce_v4_0_sriov_start()
317 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), in vce_v4_0_sriov_start()
318 ~VCE_STATUS__JOB_BUSY_MASK, 0); in vce_v4_0_sriov_start()
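Everything in vce_v4_0_sriov_start() funnels through the MMSCH_V1_0_INSERT_* macros because an SR-IOV virtual function must not program these registers directly during init: each write, read-modify-write, and poll is recorded as a command packet in a shared table that the MMSCH replays on the VF's behalf via the mailbox handshake sketched earlier. A loose illustration of the recording step; the struct layout and opcode value here are simplified stand-ins, not the real definitions from mmsch_v1_0.h:

/* Illustrative only: packet layout and opcode are stand-ins. */
struct mmsch_cmd_direct_write {
	uint32_t cmd;		/* direct-register-write opcode */
	uint32_t reg_offset;	/* SOC15_REG_OFFSET(...) of the target */
	uint32_t reg_value;	/* value the MMSCH should write */
};

static uint32_t *insert_direct_wt(uint32_t *table, uint32_t *table_size,
				  uint32_t reg, uint32_t value)
{
	struct mmsch_cmd_direct_write pkt = {
		.cmd = 1,	/* stand-in opcode value */
		.reg_offset = reg,
		.reg_value = value,
	};

	memcpy(table, &pkt, sizeof(pkt));
	*table_size += sizeof(pkt) / 4;	/* table size is tracked in dwords */
	return table + sizeof(pkt) / 4;
}

The read-modify-write and poll variants add mask and wait-condition fields; the table is terminated with an end packet and its final size is stored back into the header before the handoff to the MMSCH.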
341 ring = &adev->vce.ring[0]; in vce_v4_0_start()
343 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR), lower_32_bits(ring->wptr)); in vce_v4_0_start()
344 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR), lower_32_bits(ring->wptr)); in vce_v4_0_start()
345 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), ring->gpu_addr); in vce_v4_0_start()
346 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI), upper_32_bits(ring->gpu_addr)); in vce_v4_0_start()
347 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE), ring->ring_size / 4); in vce_v4_0_start()
351 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2), lower_32_bits(ring->wptr)); in vce_v4_0_start()
352 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2), lower_32_bits(ring->wptr)); in vce_v4_0_start()
353 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO2), ring->gpu_addr); in vce_v4_0_start()
354 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI2), upper_32_bits(ring->gpu_addr)); in vce_v4_0_start()
355 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE2), ring->ring_size / 4); in vce_v4_0_start()
359 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR3), lower_32_bits(ring->wptr)); in vce_v4_0_start()
360 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3), lower_32_bits(ring->wptr)); in vce_v4_0_start()
361 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO3), ring->gpu_addr); in vce_v4_0_start()
362 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI3), upper_32_bits(ring->gpu_addr)); in vce_v4_0_start()
363 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE3), ring->ring_size / 4); in vce_v4_0_start()
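In the bare-metal path, vce_v4_0_start() programs the same five ring-buffer registers three times over, once per ring, using the plain, "2", and "3" register suffixes. One iteration of that pattern, condensed into a hypothetical helper for illustration (the driver writes each suffixed register explicitly rather than through a helper like this):

static void vce_v4_0_program_rb(struct amdgpu_ring *ring,
				uint32_t rptr_reg, uint32_t wptr_reg,
				uint32_t base_lo_reg, uint32_t base_hi_reg,
				uint32_t size_reg)
{
	WREG32(rptr_reg, lower_32_bits(ring->wptr));	/* rptr == wptr: ring starts empty */
	WREG32(wptr_reg, lower_32_bits(ring->wptr));
	WREG32(base_lo_reg, ring->gpu_addr);
	WREG32(base_hi_reg, upper_32_bits(ring->gpu_addr));
	WREG32(size_reg, ring->ring_size / 4);		/* size in dwords */
}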
366 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), VCE_STATUS__JOB_BUSY_MASK, in vce_v4_0_start()
369 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CNTL), 1, ~0x200001); in vce_v4_0_start()
371 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), 0, in vce_v4_0_start()
378 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), 0, ~VCE_STATUS__JOB_BUSY_MASK); in vce_v4_0_start()
385 return 0; in vce_v4_0_start()
392 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CNTL), 0, ~0x200001); in vce_v4_0_stop()
395 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), in vce_v4_0_stop()
400 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), 0); in vce_v4_0_stop()
407 return 0; in vce_v4_0_stop()
422 return 0; in vce_v4_0_early_init()
465 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v4_0_sw_init()
469 ring->vm_hub = AMDGPU_MMHUB0(0); in vce_v4_0_sw_init()
478 if (i == 0) in vce_v4_0_sw_init()
483 r = amdgpu_ring_init(adev, ring, 512, &adev->vce.irq, 0, in vce_v4_0_sw_init()
528 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v4_0_hw_init()
536 return 0; in vce_v4_0_hw_init()
553 return 0; in vce_v4_0_hw_fini()
562 return 0; in vce_v4_0_suspend()
590 amdgpu_asic_set_vce_clocks(adev, 0, 0); in vce_v4_0_suspend()
635 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_A), 0, ~(1 << 16)); in vce_v4_0_mc_resume()
636 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING), 0x1FF000, ~0xFF9FF000); in vce_v4_0_mc_resume()
637 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_REG_CLOCK_GATING), 0x3F, ~0x3F); in vce_v4_0_mc_resume()
638 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_B), 0x1FF); in vce_v4_0_mc_resume()
640 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL), 0x00398000); in vce_v4_0_mc_resume()
641 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CACHE_CTRL), 0x0, ~0x1); in vce_v4_0_mc_resume()
642 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL), 0); in vce_v4_0_mc_resume()
643 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL1), 0); in vce_v4_0_mc_resume()
644 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VM_CTRL), 0); in vce_v4_0_mc_resume()
651 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR0), in vce_v4_0_mc_resume()
653 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR0), in vce_v4_0_mc_resume()
654 (tmr_mc_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
655 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), 0); in vce_v4_0_mc_resume()
657 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR0), in vce_v4_0_mc_resume()
659 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR0), in vce_v4_0_mc_resume()
660 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
661 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), offset & ~0x0f000000); in vce_v4_0_mc_resume()
665 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE0), size); in vce_v4_0_mc_resume()
667 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR1), (adev->vce.gpu_addr >> 8)); in vce_v4_0_mc_resume()
668 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR1), (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
669 offset = (adev->firmware.load_type != AMDGPU_FW_LOAD_PSP) ? offset + size : 0; in vce_v4_0_mc_resume()
671 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET1), (offset & ~0x0f000000) | (1 << 24)); in vce_v4_0_mc_resume()
672 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE1), size); in vce_v4_0_mc_resume()
674 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR2), (adev->vce.gpu_addr >> 8)); in vce_v4_0_mc_resume()
675 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR2), (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
678 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET2), (offset & ~0x0f000000) | (2 << 24)); in vce_v4_0_mc_resume()
679 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE2), size); in vce_v4_0_mc_resume()
681 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL2), 0x0, ~0x100); in vce_v4_0_mc_resume()
682 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SYS_INT_EN), in vce_v4_0_mc_resume()
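The mc_resume lines are where the searched constant does its work: VCE_VCPU_CACHE_OFFSETn takes the byte offset of firmware cache region n within the window programmed via the LMI_VCPU_CACHE_40BIT_BAR/64BIT_BAR pair, and the (offset & ~0x0f000000) | (n << 24) writes suggest that bits 27:24 of the register carry the region index while the low bits carry the offset. A hedged helper capturing that reading (the name is illustrative, not a driver function):

/* Hypothetical helper: encodes the reading above, not driver code. */
static uint32_t vce_vcpu_cache_offset(uint32_t byte_offset, uint32_t region)
{
	/* clear bits 27:24, then stamp in the cache-region index */
	return (byte_offset & ~0x0f000000) | (region << 24);
}

Under this reading, the OFFSET1/OFFSET2 writes above (and their SR-IOV twins at lines 290 and 296) are equivalent to vce_vcpu_cache_offset(offset, 1) and vce_vcpu_cache_offset(offset, 2), with region 0 keeping its index bits zero.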
691 return 0; in vce_v4_0_set_clockgating_state()
760 lower_32_bits(pd_addr), 0xffffffff); in vce_v4_0_emit_vm_flush()
776 uint32_t val = 0; in vce_v4_0_set_interrupt_state()
782 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SYS_INT_EN), val, in vce_v4_0_set_interrupt_state()
785 return 0; in vce_v4_0_set_interrupt_state()
794 switch (entry->src_data[0]) { in vce_v4_0_process_interrupt()
795 case 0: in vce_v4_0_process_interrupt()
798 amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]); in vce_v4_0_process_interrupt()
802 entry->src_id, entry->src_data[0]); in vce_v4_0_process_interrupt()
806 return 0; in vce_v4_0_process_interrupt()
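The interrupt path is a straight dispatch: src_data[0] names the ring that completed work, and cases 0 through 2 all funnel into amdgpu_fence_process() for that ring. Reconstructed from the matched lines (cases 1 and 2 are implied by the array indexing rather than shown):

static int vce_v4_0_process_interrupt(struct amdgpu_device *adev,
				      struct amdgpu_irq_src *source,
				      struct amdgpu_iv_entry *entry)
{
	switch (entry->src_data[0]) {
	case 0:
	case 1:
	case 2:
		/* src_data selects which ring's fences to process */
		amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]);
		break;
	default:
		DRM_ERROR("Unhandled interrupt: %d %d\n",
			  entry->src_id, entry->src_data[0]);
		break;
	}

	return 0;
}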
824 .align_mask = 0x3f,
858 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v4_0_set_ring_funcs()
880 .minor = 0,
881 .rev = 0,