Lines matching 0x00000100 in sdma_v3_0.c
82 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
83 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
84 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
85 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
86 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
87 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
88 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
89 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
90 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
91 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
96 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
97 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
102 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
103 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
104 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
105 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
106 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
107 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
108 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
109 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
114 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
115 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
120 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
121 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
122 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
123 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
124 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
125 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
126 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
127 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
128 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
129 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
134 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
135 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
136 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
137 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
138 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
139 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
140 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
141 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
142 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
143 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
148 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
149 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
150 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
151 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
152 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
153 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
154 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
155 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
156 mmSDMA1_GFX_IB_CNTL, 0x00000100, 0x00000100,
157 mmSDMA1_POWER_CNTL, 0x00000800, 0x0003c800,
158 mmSDMA1_RLC0_IB_CNTL, 0x00000100, 0x00000100,
159 mmSDMA1_RLC1_IB_CNTL, 0x00000100, 0x00000100,
164 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
165 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
170 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
171 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
172 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
173 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
178 mmSDMA0_CLK_CTRL, 0xffffffff, 0x00000100,
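Each row in the register tables above is a (register, AND-mask, OR-value) triplet. As a minimal sketch of how the driver walks such a golden-settings table, modeled on amdgpu_device_program_register_sequence() in the mainline driver (the helper name apply_golden_registers below is hypothetical, and the exact mainline signature should be treated as an assumption):

static void apply_golden_registers(struct amdgpu_device *adev,
				   const u32 *registers, u32 array_size)
{
	u32 tmp, reg, and_mask, or_mask;
	int i;

	if (array_size % 3)		/* table must be whole triplets */
		return;

	for (i = 0; i < array_size; i += 3) {
		reg      = registers[i + 0];
		and_mask = registers[i + 1];	/* bits being retuned */
		or_mask  = registers[i + 2];	/* new values for those bits */

		if (and_mask == 0xffffffff) {
			tmp = or_mask;		/* full overwrite, skip the read */
		} else {
			tmp = RREG32(reg);	/* read-modify-write */
			tmp &= ~and_mask;
			tmp |= (or_mask & and_mask);
		}
		WREG32(reg, tmp);
	}
}

Read this way, "mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100" means: within the bits selected by 0xff000ff0, program 0x00000100, and leave every other bit of the register untouched.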
254 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_free_microcode()
265 * Returns 0 on success, error on failure.
270 int err = 0, i; in sdma_v3_0_init_microcode()
305 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
306 if (i == 0) in sdma_v3_0_init_microcode()
333 chip_name, i == 0 ? "" : "1"); in sdma_v3_0_init_microcode()
334 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_init_microcode()
405 for (i = 0; i < count; i++) in sdma_v3_0_ring_insert_nop()
406 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_insert_nop()
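The burst-NOP test above only fires on the first loop iteration: when the SDMA instance supports burst NOPs, the entire padding run is folded into a single packet whose COUNT field covers the remaining slots. A hedged reconstruction of the surrounding loop (close to the mainline body, but treat the exact macro usage as an approximation):

	for (i = 0; i < count; i++)
		if (sdma && sdma->burst_nop && (i == 0))
			/* one burst packet accounting for all `count` NOP slots */
			amdgpu_ring_write(ring, ring->funcs->nop |
					  SDMA_PKT_NOP_HEADER_COUNT(count - 1));
		else
			amdgpu_ring_write(ring, ring->funcs->nop);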
434 SDMA_PKT_INDIRECT_HEADER_VMID(vmid & 0xf)); in sdma_v3_0_ring_emit_ib()
436 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
439 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
440 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
453 u32 ref_and_mask = 0; in sdma_v3_0_ring_emit_hdp_flush()
455 if (ring->me == 0) in sdma_v3_0_ring_emit_hdp_flush()
467 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_hdp_flush()
504 amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0)); in sdma_v3_0_ring_emit_fence()
519 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
521 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v3_0_gfx_stop()
524 ib_cntl = REG_SET_FIELD(ib_cntl, SDMA0_GFX_IB_CNTL, IB_ENABLE, 0); in sdma_v3_0_gfx_stop()
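REG_SET_FIELD(), used throughout these stop/enable paths, is the driver's masked field update. A sketch of its expansion, assuming the usual <REG>__<FIELD>_MASK and <REG>__<FIELD>__SHIFT definitions generated by the register headers (an approximation of the real amdgpu macro, not a verbatim copy):

#define REG_FIELD_SHIFT(reg, field) reg##__##field##__SHIFT
#define REG_FIELD_MASK(reg, field)  reg##__##field##_MASK

/* Replace one named field in orig_val, preserving all other bits. */
#define REG_SET_FIELD(orig_val, reg, field, field_val)			\
	(((orig_val) & ~REG_FIELD_MASK(reg, field)) |			\
	 (REG_FIELD_MASK(reg, field) &					\
	  ((field_val) << REG_FIELD_SHIFT(reg, field))))

So the RB_ENABLE line above clears only the SDMA0_GFX_RB_CNTL__RB_ENABLE bit while leaving the ring-buffer size and the other control bits as they were.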
551 u32 f32_cntl, phase_quantum = 0; in sdma_v3_0_ctx_switch_enable()
556 unsigned unit = 0; in sdma_v3_0_ctx_switch_enable()
578 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
593 AUTO_CTXSW_ENABLE, 0); in sdma_v3_0_ctx_switch_enable()
620 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
623 f32_cntl = REG_SET_FIELD(f32_cntl, SDMA0_F32_CNTL, HALT, 0); in sdma_v3_0_enable()
636 * Returns 0 for success, error for failure.
647 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
652 for (j = 0; j < 16; j++) { in sdma_v3_0_gfx_resume()
653 vi_srbm_select(adev, 0, 0, 0, j); in sdma_v3_0_gfx_resume()
655 WREG32(mmSDMA0_GFX_VIRTUAL_ADDR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
656 WREG32(mmSDMA0_GFX_APE1_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
658 vi_srbm_select(adev, 0, 0, 0, 0); in sdma_v3_0_gfx_resume()
662 adev->gfx.config.gb_addr_config & 0x70); in sdma_v3_0_gfx_resume()
664 WREG32(mmSDMA0_SEM_WAIT_FAIL_TIMER_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
678 ring->wptr = 0; in sdma_v3_0_gfx_resume()
679 WREG32(mmSDMA0_GFX_RB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
681 WREG32(mmSDMA0_GFX_IB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
682 WREG32(mmSDMA0_GFX_IB_OFFSET + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
686 upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
688 lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
702 doorbell = REG_SET_FIELD(doorbell, SDMA0_GFX_DOORBELL, ENABLE, 0); in sdma_v3_0_gfx_resume()
716 WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
723 ENABLE, 0); in sdma_v3_0_gfx_resume()
745 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
752 return 0; in sdma_v3_0_gfx_resume()
761 * Returns 0 for success, error for failure.
766 return 0; in sdma_v3_0_rlc_resume()
775 * Returns 0 for success, error for failure.
793 return 0; in sdma_v3_0_start()
803 * Returns 0 for success, error for failure.
819 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ring()
831 amdgpu_ring_write(ring, 0xDEADBEEF); in sdma_v3_0_ring_test_ring()
834 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ring()
836 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ring()
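These fragments are the classic SDMA ring self-test: seed a writeback slot with the poison value 0xCAFEDEAD, submit a small SDMA_OP_WRITE packet that stores 0xDEADBEEF to it, then poll until the value flips. A condensed sketch of that flow (writeback-slot allocation and error handling omitted; the packet macros follow the SDMA v3 headers, but the exact sequence here is an assumption):

	adev->wb.wb[index] = cpu_to_le32(0xCAFEDEAD);	/* poison value */
	gpu_addr = adev->wb.gpu_addr + (index * 4);

	amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
			  SDMA_PKT_HEADER_SUB_OP(SDMA_SUBOP_WRITE_LINEAR));
	amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
	amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
	amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1));
	amdgpu_ring_write(ring, 0xDEADBEEF);		/* test pattern */
	amdgpu_ring_commit(ring);

	for (i = 0; i < adev->usec_timeout; i++) {
		tmp = le32_to_cpu(adev->wb.wb[index]);
		if (tmp == 0xDEADBEEF)	/* the engine executed the write */
			break;
		udelay(1);
	}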
856 * Returns 0 on success, error on failure.
864 u32 tmp = 0; in sdma_v3_0_ring_test_ib()
873 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ib()
875 memset(&ib, 0, sizeof(ib)); in sdma_v3_0_ring_test_ib()
881 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_ring_test_ib()
886 ib.ptr[4] = 0xDEADBEEF; in sdma_v3_0_ring_test_ib()
897 if (r == 0) { in sdma_v3_0_ring_test_ib()
900 } else if (r < 0) { in sdma_v3_0_ring_test_ib()
904 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ib()
905 r = 0; in sdma_v3_0_ring_test_ib()
935 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_vm_copy_pte()
964 for (; ndw > 0; ndw -= 2) { in sdma_v3_0_vm_write_pte()
996 ib->ptr[ib->length_dw++] = 0; in sdma_v3_0_vm_set_pte_pde()
1014 for (i = 0; i < pad_count; i++) in sdma_v3_0_ring_pad_ib()
1015 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_pad_ib()
1038 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_pipeline_sync()
1041 amdgpu_ring_write(ring, addr & 0xfffffffc); in sdma_v3_0_ring_emit_pipeline_sync()
1042 amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff); in sdma_v3_0_ring_emit_pipeline_sync()
1044 amdgpu_ring_write(ring, 0xffffffff); /* mask */ in sdma_v3_0_ring_emit_pipeline_sync()
1045 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_pipeline_sync()
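The POLL_REGMEM packet assembled here makes the SDMA engine itself spin on a memory word: it re-reads the address until the masked value matches the reference, which is how a fence sequence number in memory becomes a pipeline synchronization point without CPU involvement. A worked decode of the retry dword, assuming the usual field layout from the SDMA packet headers (the INTERVAL operand is an assumption based on the mainline source):

	/* DW5 = SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
	 *       SDMA_PKT_POLL_REGMEM_DW5_INTERVAL(4)
	 * retry up to 0xfff (4095) times, with the encoded interval
	 * between re-reads, before the engine gives up the poll. */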
1066 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_vm_flush()
1067 SDMA_PKT_POLL_REGMEM_HEADER_FUNC(0)); /* always */ in sdma_v3_0_ring_emit_vm_flush()
1069 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_vm_flush()
1070 amdgpu_ring_write(ring, 0); /* reference */ in sdma_v3_0_ring_emit_vm_flush()
1071 amdgpu_ring_write(ring, 0); /* mask */ in sdma_v3_0_ring_emit_vm_flush()
1072 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_vm_flush()
1080 SDMA_PKT_SRBM_WRITE_HEADER_BYTE_EN(0xf)); in sdma_v3_0_ring_emit_wreg()
1108 return 0; in sdma_v3_0_early_init()
1135 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_sw_init()
1147 (i == 0) ? AMDGPU_SDMA_IRQ_INSTANCE0 : in sdma_v3_0_sw_init()
1162 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_sw_fini()
1166 return 0; in sdma_v3_0_sw_fini()
1190 return 0; in sdma_v3_0_hw_fini()
1221 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_wait_for_idle()
1226 return 0; in sdma_v3_0_wait_for_idle()
1235 u32 srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1248 adev->sdma.srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1256 u32 srbm_soft_reset = 0; in sdma_v3_0_pre_soft_reset()
1259 return 0; in sdma_v3_0_pre_soft_reset()
1269 return 0; in sdma_v3_0_pre_soft_reset()
1275 u32 srbm_soft_reset = 0; in sdma_v3_0_post_soft_reset()
1278 return 0; in sdma_v3_0_post_soft_reset()
1288 return 0; in sdma_v3_0_post_soft_reset()
1294 u32 srbm_soft_reset = 0; in sdma_v3_0_soft_reset()
1298 return 0; in sdma_v3_0_soft_reset()
1305 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in sdma_v3_0_soft_reset()
1319 return 0; in sdma_v3_0_soft_reset()
1334 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1350 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1365 return 0; in sdma_v3_0_set_trap_irq_state()
1374 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_trap_irq()
1375 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_trap_irq()
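The decode above packs both coordinates into the low nibble of ring_id: bits [1:0] select the SDMA instance and bits [3:2] select the queue. A worked example:

	/* ring_id = 0x5 = 0b0101:
	 *   instance_id = (0x5 & 0x3) >> 0 = 1   -> SDMA1
	 *   queue_id    = (0x5 & 0xc) >> 2 = 1   -> first RLC queue
	 * which then drives the nested instance/queue switch in this
	 * handler. */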
1378 case 0: in sdma_v3_0_process_trap_irq()
1380 case 0: in sdma_v3_0_process_trap_irq()
1381 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v3_0_process_trap_irq()
1393 case 0: in sdma_v3_0_process_trap_irq()
1405 return 0; in sdma_v3_0_process_trap_irq()
1415 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_illegal_inst_irq()
1416 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_illegal_inst_irq()
1418 if (instance_id <= 1 && queue_id == 0) in sdma_v3_0_process_illegal_inst_irq()
1420 return 0; in sdma_v3_0_process_illegal_inst_irq()
1431 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1445 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1470 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1478 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1494 return 0; in sdma_v3_0_set_clockgating_state()
1508 return 0; in sdma_v3_0_set_clockgating_state()
1514 return 0; in sdma_v3_0_set_powergating_state()
1523 *flags = 0; in sdma_v3_0_get_clockgating_state()
1526 data = RREG32(mmSDMA0_CLK_CTRL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1531 data = RREG32(mmSDMA0_POWER_CNTL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1558 .align_mask = 0xf,
1588 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_ring_funcs()
1632 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_emit_copy_buffer()
1662 .copy_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
1666 .fill_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
1674 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_buffer_funcs()
1690 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_vm_pte_funcs()
1701 .minor = 0,
1702 .rev = 0,
1711 .rev = 0,
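The trailing .minor and .rev hits come from the IP-block version descriptors at the end of the file. A hedged reconstruction of their shape (the fields not shown in the matches above are inferred from the sdma_v3_0 / sdma_v3_1 naming):

const struct amdgpu_ip_block_version sdma_v3_0_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_SDMA,
	.major = 3,
	.minor = 0,
	.rev = 0,
	.funcs = &sdma_v3_0_ip_funcs,
};

const struct amdgpu_ip_block_version sdma_v3_1_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_SDMA,
	.major = 3,
	.minor = 1,
	.rev = 0,
	.funcs = &sdma_v3_0_ip_funcs,
};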