Lines matching 0x10021
20 {0x7e40, 0x7e44, 0x020, 28}, /* t6_tp_pio_regs_20_to_3b */
21 {0x7e40, 0x7e44, 0x040, 10}, /* t6_tp_pio_regs_40_to_49 */
22 {0x7e40, 0x7e44, 0x050, 10}, /* t6_tp_pio_regs_50_to_59 */
23 {0x7e40, 0x7e44, 0x060, 14}, /* t6_tp_pio_regs_60_to_6d */
24 {0x7e40, 0x7e44, 0x06F, 1}, /* t6_tp_pio_regs_6f */
25 {0x7e40, 0x7e44, 0x070, 6}, /* t6_tp_pio_regs_70_to_75 */
26 {0x7e40, 0x7e44, 0x130, 18}, /* t6_tp_pio_regs_130_to_141 */
27 {0x7e40, 0x7e44, 0x145, 19}, /* t6_tp_pio_regs_145_to_157 */
28 {0x7e40, 0x7e44, 0x160, 1}, /* t6_tp_pio_regs_160 */
29 {0x7e40, 0x7e44, 0x230, 25}, /* t6_tp_pio_regs_230_to_248 */
30 {0x7e40, 0x7e44, 0x24a, 3}, /* t6_tp_pio_regs_24c */
31 {0x7e40, 0x7e44, 0x8C0, 1} /* t6_tp_pio_regs_8c0 */
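Each four-number entry above follows the cudbg indirect-register descriptor layout {address register, data register, first index, register count}: the collector writes each index into the address register and reads the value back through the data register (cxgb4 does this with t4_read_indirect()). Below is a minimal standalone sketch of that access pattern; reg_write32()/reg_read32() and the fake register file are stand-ins for the driver's real MMIO accessors so the snippet compiles and runs on its own.

#include <stdint.h>
#include <stdio.h>

/* Fake device state so the sketch runs without hardware; in the driver
 * these accesses would be t4_write_reg()/t4_read_reg() against BAR0.
 */
static uint32_t fake_index;
static uint32_t fake_pio[0x1000];

static void reg_write32(uint32_t reg, uint32_t val)
{
	(void)reg;			/* single address register in this demo */
	fake_index = val;
}

static uint32_t reg_read32(uint32_t reg)
{
	(void)reg;			/* single data register in this demo */
	return fake_pio[fake_index & 0xfff];
}

/* Walk one {addr_reg, data_reg, start_idx, count} window: write the index,
 * read the value, advance to the next index.
 */
static void read_indirect_window(uint32_t addr_reg, uint32_t data_reg,
				 uint32_t start_idx, uint32_t nregs,
				 uint32_t *vals)
{
	while (nregs--) {
		reg_write32(addr_reg, start_idx++);
		*vals++ = reg_read32(data_reg);
	}
}

int main(void)
{
	uint32_t buf[28];
	uint32_t i;

	for (i = 0; i < 0x1000; i++)
		fake_pio[i] = 0xdead0000 | i;	/* dummy contents */

	/* Mirrors the first entry above: 28 TP PIO registers from index 0x020. */
	read_indirect_window(0x7e40, 0x7e44, 0x020, 28, buf);
	for (i = 0; i < 28; i++)
		printf("tp_pio[0x%03x] = 0x%08x\n", 0x020 + i, buf[i]);
	return 0;
}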
35 {0x7e40, 0x7e44, 0x020, 28}, /* t5_tp_pio_regs_20_to_3b */
36 {0x7e40, 0x7e44, 0x040, 19}, /* t5_tp_pio_regs_40_to_52 */
37 {0x7e40, 0x7e44, 0x054, 2}, /* t5_tp_pio_regs_54_to_55 */
38 {0x7e40, 0x7e44, 0x060, 13}, /* t5_tp_pio_regs_60_to_6c */
39 {0x7e40, 0x7e44, 0x06F, 1}, /* t5_tp_pio_regs_6f */
40 {0x7e40, 0x7e44, 0x120, 4}, /* t5_tp_pio_regs_120_to_123 */
41 {0x7e40, 0x7e44, 0x12b, 2}, /* t5_tp_pio_regs_12b_to_12c */
42 {0x7e40, 0x7e44, 0x12f, 21}, /* t5_tp_pio_regs_12f_to_143 */
43 {0x7e40, 0x7e44, 0x145, 19}, /* t5_tp_pio_regs_145_to_157 */
44 {0x7e40, 0x7e44, 0x230, 25}, /* t5_tp_pio_regs_230_to_248 */
45 {0x7e40, 0x7e44, 0x8C0, 1} /* t5_tp_pio_regs_8c0 */
49 {0x7e18, 0x7e1c, 0x0, 12}
53 {0x7e18, 0x7e1c, 0x0, 12}
57 {0x7e50, 0x7e54, 0x0, 13},
58 {0x7e50, 0x7e54, 0x10, 6},
59 {0x7e50, 0x7e54, 0x18, 21},
60 {0x7e50, 0x7e54, 0x30, 32},
61 {0x7e50, 0x7e54, 0x50, 22},
62 {0x7e50, 0x7e54, 0x68, 12}
66 {0x7e50, 0x7e54, 0x0, 13},
67 {0x7e50, 0x7e54, 0x10, 6},
68 {0x7e50, 0x7e54, 0x18, 8},
69 {0x7e50, 0x7e54, 0x20, 13},
70 {0x7e50, 0x7e54, 0x30, 16},
71 {0x7e50, 0x7e54, 0x40, 16},
72 {0x7e50, 0x7e54, 0x50, 16},
73 {0x7e50, 0x7e54, 0x60, 6},
74 {0x7e50, 0x7e54, 0x68, 4}
78 {0x10cc, 0x10d0, 0x0, 16},
79 {0x10cc, 0x10d4, 0x0, 16},
83 /* 1 addr reg SGE_QBASE_INDEX and 4 data reg SGE_QBASE_MAP[0-3] */
84 0x1250, 0x1240, 0x1244, 0x1248, 0x124c,
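This table is a variation on the same scheme: one index register (0x1250, SGE_QBASE_INDEX) selects a PCI function, after which the four data registers (0x1240-0x124c, SGE_QBASE_MAP0-3) are read back for it; per the comment shown further down, index entries 0-7 are PF0-7 and VF entries start at index 8. A small self-contained sketch of that per-function read, again with stand-in accessors backed by fake state rather than real MMIO:

#include <stdint.h>
#include <stdio.h>

#define SGE_QBASE_INDEX		0x1250	/* address register from the table above */
#define SGE_QBASE_DATA_REG_NUM	4

static const uint32_t sge_qbase_map[SGE_QBASE_DATA_REG_NUM] = {
	0x1240, 0x1244, 0x1248, 0x124c,	/* SGE_QBASE_MAP0..3 */
};

/* Stand-in MMIO accessors with trivial fake behaviour so the sketch
 * compiles and runs on its own.
 */
static uint32_t fake_index;
static void reg_write32(uint32_t reg, uint32_t val) { (void)reg; fake_index = val; }
static uint32_t reg_read32(uint32_t reg) { return (fake_index << 8) | (reg & 0xff); }

/* Read the four SGE_QBASE_MAP registers for one PCI function.  Index
 * entries 0..7 select PF0..7; VF entries start at index 8.
 */
static void read_sge_qbase(uint32_t func, int is_pf,
			   uint32_t vals[SGE_QBASE_DATA_REG_NUM])
{
	int i;

	if (!is_pf)
		func += 8;		/* VFID0 lives at entry 8 */

	reg_write32(SGE_QBASE_INDEX, func);
	for (i = 0; i < SGE_QBASE_DATA_REG_NUM; i++)
		vals[i] = reg_read32(sge_qbase_map[i]);
}

int main(void)
{
	uint32_t v[SGE_QBASE_DATA_REG_NUM];
	int i;

	read_sge_qbase(0, 1, v);	/* PF0 */
	for (i = 0; i < SGE_QBASE_DATA_REG_NUM; i++)
		printf("SGE_QBASE_MAP%d = 0x%08x\n", i, v[i]);
	return 0;
}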
88 {0x5a04, 0x5a0c, 0x00, 0x20}, /* t5_pcie_pdbg_regs_00_to_20 */
89 {0x5a04, 0x5a0c, 0x21, 0x20}, /* t5_pcie_pdbg_regs_21_to_40 */
90 {0x5a04, 0x5a0c, 0x41, 0x10}, /* t5_pcie_pdbg_regs_41_to_50 */
94 {0x5a10, 0x5a18, 0x00, 0x20}, /* t5_pcie_cdbg_regs_00_to_20 */
95 {0x5a10, 0x5a18, 0x21, 0x18}, /* t5_pcie_cdbg_regs_21_to_37 */
99 {0x8FD0, 0x8FD4, 0x10000, 0x20}, /* t5_pm_rx_regs_10000_to_10020 */
100 {0x8FD0, 0x8FD4, 0x10021, 0x0D}, /* t5_pm_rx_regs_10021_to_1002c */
104 {0x8FF0, 0x8FF4, 0x10000, 0x20}, /* t5_pm_tx_regs_10000_to_10020 */
105 {0x8FF0, 0x8FF4, 0x10021, 0x1D}, /* t5_pm_tx_regs_10021_to_1003c */
109 {0x0, 0x34},
110 {0x3c, 0x40},
111 {0x50, 0x64},
112 {0x70, 0x80},
113 {0x94, 0xa0},
114 {0xb0, 0xb8},
115 {0xd0, 0xd4},
116 {0x100, 0x128},
117 {0x140, 0x148},
118 {0x150, 0x164},
119 {0x170, 0x178},
120 {0x180, 0x194},
121 {0x1a0, 0x1b8},
122 {0x1c0, 0x208},
126 {0x78f8, 0x78fc, 0xa000, 23}, /* t6_ma_regs_a000_to_a016 */
127 {0x78f8, 0x78fc, 0xa400, 30}, /* t6_ma_regs_a400_to_a41e */
128 {0x78f8, 0x78fc, 0xa800, 20} /* t6_ma_regs_a800_to_a813 */
132 {0x78f8, 0x78fc, 0xe400, 17}, /* t6_ma_regs_e400_to_e600 */
133 {0x78f8, 0x78fc, 0xe640, 13} /* t6_ma_regs_e640_to_e7c0 */
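The two entries above are not plain contiguous windows: judging by the collection loop shown further down (one read per iteration, then ireg_local_offset += 0x20), each entry covers "count" registers spaced 0x20 indices apart, which is exactly what the comments work out to: 0xe400 + 16 * 0x20 = 0xe600 and 0xe640 + 12 * 0x20 = 0xe7c0. A tiny sketch that expands such a strided descriptor into the indices it touches; expand_strided() is a hypothetical helper for illustration only:

#include <stdint.h>
#include <stdio.h>

/* Print the indirect-register indices covered by a strided descriptor
 * {addr_reg, data_reg, start, count} that is read with a fixed stride
 * between reads (0x20 for the MA entries above; plain windows use 1).
 */
static void expand_strided(uint32_t start, uint32_t count, uint32_t stride)
{
	uint32_t i;

	for (i = 0; i < count; i++)
		printf("index 0x%04x\n", start + i * stride);
}

int main(void)
{
	expand_strided(0xe400, 17, 0x20);	/* last index 0xe600, matching the comment */
	expand_strided(0xe640, 13, 0x20);	/* last index 0xe7c0 */
	return 0;
}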
137 {0x7b50, 0x7b54, 0x2000, 0x20, 0}, /* up_cim_2000_to_207c */
138 {0x7b50, 0x7b54, 0x2080, 0x1d, 0}, /* up_cim_2080_to_20fc */
139 {0x7b50, 0x7b54, 0x00, 0x20, 0}, /* up_cim_00_to_7c */
140 {0x7b50, 0x7b54, 0x80, 0x20, 0}, /* up_cim_80_to_fc */
141 {0x7b50, 0x7b54, 0x100, 0x11, 0}, /* up_cim_100_to_14c */
142 {0x7b50, 0x7b54, 0x200, 0x10, 0}, /* up_cim_200_to_23c */
143 {0x7b50, 0x7b54, 0x240, 0x2, 0}, /* up_cim_240_to_244 */
144 {0x7b50, 0x7b54, 0x250, 0x2, 0}, /* up_cim_250_to_254 */
145 {0x7b50, 0x7b54, 0x260, 0x2, 0}, /* up_cim_260_to_264 */
146 {0x7b50, 0x7b54, 0x270, 0x2, 0}, /* up_cim_270_to_274 */
147 {0x7b50, 0x7b54, 0x280, 0x20, 0}, /* up_cim_280_to_2fc */
148 {0x7b50, 0x7b54, 0x300, 0x20, 0}, /* up_cim_300_to_37c */
149 {0x7b50, 0x7b54, 0x380, 0x14, 0}, /* up_cim_380_to_3cc */
150 {0x7b50, 0x7b54, 0x4900, 0x4, 0x4}, /* up_cim_4900_to_4c60 */
151 {0x7b50, 0x7b54, 0x4904, 0x4, 0x4}, /* up_cim_4904_to_4c64 */
152 {0x7b50, 0x7b54, 0x4908, 0x4, 0x4}, /* up_cim_4908_to_4c68 */
153 {0x7b50, 0x7b54, 0x4910, 0x4, 0x4}, /* up_cim_4910_to_4c70 */
154 {0x7b50, 0x7b54, 0x4914, 0x4, 0x4}, /* up_cim_4914_to_4c74 */
155 {0x7b50, 0x7b54, 0x4920, 0x10, 0x10}, /* up_cim_4920_to_4a10 */
156 {0x7b50, 0x7b54, 0x4924, 0x10, 0x10}, /* up_cim_4924_to_4a14 */
157 {0x7b50, 0x7b54, 0x4928, 0x10, 0x10}, /* up_cim_4928_to_4a18 */
158 {0x7b50, 0x7b54, 0x492c, 0x10, 0x10}, /* up_cim_492c_to_4a1c */
162 {0x7b50, 0x7b54, 0x2000, 0x20, 0}, /* up_cim_2000_to_207c */
163 {0x7b50, 0x7b54, 0x2080, 0x19, 0}, /* up_cim_2080_to_20ec */
164 {0x7b50, 0x7b54, 0x00, 0x20, 0}, /* up_cim_00_to_7c */
165 {0x7b50, 0x7b54, 0x80, 0x20, 0}, /* up_cim_80_to_fc */
166 {0x7b50, 0x7b54, 0x100, 0x11, 0}, /* up_cim_100_to_14c */
167 {0x7b50, 0x7b54, 0x200, 0x10, 0}, /* up_cim_200_to_23c */
168 {0x7b50, 0x7b54, 0x240, 0x2, 0}, /* up_cim_240_to_244 */
169 {0x7b50, 0x7b54, 0x250, 0x2, 0}, /* up_cim_250_to_254 */
170 {0x7b50, 0x7b54, 0x260, 0x2, 0}, /* up_cim_260_to_264 */
171 {0x7b50, 0x7b54, 0x270, 0x2, 0}, /* up_cim_270_to_274 */
172 {0x7b50, 0x7b54, 0x280, 0x20, 0}, /* up_cim_280_to_2fc */
173 {0x7b50, 0x7b54, 0x300, 0x20, 0}, /* up_cim_300_to_37c */
174 {0x7b50, 0x7b54, 0x380, 0x14, 0}, /* up_cim_380_to_3cc */
178 {0x51320, 0x51324, 0xa000, 32} /* t6_hma_regs_a000_to_a01f */
183 struct cudbg_tcam tcam_region = { 0 }; in cudbg_get_entity_length()
184 u32 value, n = 0, len = 0; in cudbg_get_entity_length()
228 len = cudbg_cim_obq_size(adap, 0); in cudbg_get_entity_length()
382 n = 0; in cudbg_get_entity_length()
429 struct cudbg_buffer temp_in_buff = { 0 }; in cudbg_do_compression()
439 bytes_read = 0; in cudbg_do_compression()
440 while (bytes_left > 0) { in cudbg_do_compression()
454 return 0; in cudbg_do_compression()
461 int rc = 0; in cudbg_write_and_release_buff()
481 return 0; in is_fw_attached()
492 u8 zero_buf[4] = {0}; in cudbg_align_debug_buffer()
521 if (vaddr < 0) in cudbg_read_vpd_reg()
525 if (rc < 0) in cudbg_read_vpd_reg()
528 return 0; in cudbg_read_vpd_reg()
544 memset(meminfo_buff->avail, 0, in cudbg_fill_meminfo()
547 memset(meminfo_buff->mem, 0, in cudbg_fill_meminfo()
551 for (i = 0; i < ARRAY_SIZE(meminfo_buff->mem); i++) { in cudbg_fill_meminfo()
552 meminfo_buff->mem[i].limit = 0; in cudbg_fill_meminfo()
557 i = 0; in cudbg_fill_meminfo()
566 meminfo_buff->avail[i].idx = 0; in cudbg_fill_meminfo()
665 md->limit = 0; in cudbg_fill_meminfo()
667 md->base = 0; in cudbg_fill_meminfo()
675 } while (0) in cudbg_fill_meminfo()
686 md->base = 0; in cudbg_fill_meminfo()
691 u32 size = 0; in cudbg_fill_meminfo()
710 md->limit = 0; in cudbg_fill_meminfo()
713 md->limit = 0; in cudbg_fill_meminfo()
724 for (n = 0; n < i - 1; n++) in cudbg_fill_meminfo()
749 for (i = 0, meminfo_buff->free_rx_cnt = 0; i < 2; i++) in cudbg_fill_meminfo()
754 meminfo_buff->rx_pages_data[0] = PMRXMAXPAGE_G(lo); in cudbg_fill_meminfo()
761 for (i = 0, meminfo_buff->free_tx_cnt = 0; i < 4; i++) in cudbg_fill_meminfo()
766 meminfo_buff->tx_pages_data[0] = PMTXMAXPAGE_G(lo); in cudbg_fill_meminfo()
777 for (i = 0; i < 4; i++) { in cudbg_fill_meminfo()
794 for (i = 0; i < padap->params.arch.nchan; i++) { in cudbg_fill_meminfo()
811 return 0; in cudbg_fill_meminfo()
819 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_reg_dump()
820 u32 buf_size = 0; in cudbg_collect_reg_dump()
821 int rc = 0; in cudbg_collect_reg_dump()
840 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_fw_devlog()
842 int rc = 0; in cudbg_collect_fw_devlog()
845 if (rc < 0) { in cudbg_collect_fw_devlog()
856 if (dparams->start != 0) { in cudbg_collect_fw_devlog()
878 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_la()
880 u32 cfg = 0; in cudbg_collect_cim_la()
906 if (rc < 0) { in cudbg_collect_cim_la()
919 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_ma_la()
939 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_qcfg()
977 struct cudbg_buffer temp_buff = { 0 }; in cudbg_read_cim_ibq()
978 int no_of_read_words, rc = 0; in cudbg_read_cim_ibq()
990 /* no_of_read_words is less than or equal to 0 means error */ in cudbg_read_cim_ibq()
991 if (no_of_read_words <= 0) { in cudbg_read_cim_ibq()
1007 return cudbg_read_cim_ibq(pdbg_init, dbg_buff, cudbg_err, 0); in cudbg_collect_cim_ibq_tp0()
1061 struct cudbg_buffer temp_buff = { 0 }; in cudbg_read_cim_obq()
1062 int no_of_read_words, rc = 0; in cudbg_read_cim_obq()
1074 /* no_of_read_words is less than or equal to 0 means error */ in cudbg_read_cim_obq()
1075 if (no_of_read_words <= 0) { in cudbg_read_cim_obq()
1091 return cudbg_read_cim_obq(pdbg_init, dbg_buff, cudbg_err, 0); in cudbg_collect_cim_obq_ulp0()
1170 for (i = 0; i < mem_info->avail_c; i++) { in cudbg_meminfo_get_mem_index()
1173 return 0; in cudbg_meminfo_get_mem_index()
1186 u8 mc, found = 0; in cudbg_get_mem_region()
1187 u32 idx = 0; in cudbg_get_mem_region()
1195 if (i < 0) in cudbg_get_mem_region()
1199 for (i = 0; i < meminfo->mem_c; i++) { in cudbg_get_mem_region()
1206 meminfo->mem[i + 1].base - 1 : ~0; in cudbg_get_mem_region()
1226 return 0; in cudbg_get_mem_region()
1229 /* Fetch and update the start and end of the requested memory region w.r.t 0
1244 *out_base = 0; in cudbg_get_mem_relative()
1253 return 0; in cudbg_get_mem_relative()
1261 struct cudbg_mem_desc mem_desc = { 0 }; in cudbg_get_payload_range()
1273 return 0; in cudbg_get_payload_range()
1296 if (addr & 0x3 || (uintptr_t)hbuf & 0x3) in cudbg_memory_read()
1302 resid = len & 0x7; in cudbg_memory_read()
1311 win_pf = is_t4(adap->params.chip) ? 0 : PFNUM_V(adap->pf); in cudbg_memory_read()
1322 while (len > 0) { in cudbg_memory_read()
1333 offset = 0; in cudbg_memory_read()
1351 offset = 0; in cudbg_memory_read()
1361 return 0; in cudbg_memory_read()
1373 unsigned long bytes, bytes_left, bytes_read = 0; in cudbg_read_fw_mem()
1375 struct cudbg_buffer temp_buff = { 0 }; in cudbg_read_fw_mem()
1377 u32 yield_count = 0; in cudbg_read_fw_mem()
1378 int rc = 0; in cudbg_read_fw_mem()
1382 memset(payload, 0, sizeof(payload)); in cudbg_read_fw_mem()
1383 for (i = 0; i < ARRAY_SIZE(region_name); i++) { in cudbg_read_fw_mem()
1399 while (bytes_left > 0) { in cudbg_read_fw_mem()
1415 for (i = 0; i < ARRAY_SIZE(payload); i++) in cudbg_read_fw_mem()
1468 memset(&mem_info, 0, sizeof(struct cudbg_meminfo)); in cudbg_mem_region_size()
1486 return 0; in cudbg_mem_region_size()
1494 unsigned long size = 0; in cudbg_collect_mem_region()
1550 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_rss()
1573 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_rss_vf_config()
1585 for (vf = 0; vf < vf_count; vf++) in cudbg_collect_rss_vf_config()
1596 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_path_mtu()
1613 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pm_stats()
1633 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_hw_sched()
1635 int i, rc = 0; in cudbg_collect_hw_sched()
1650 for (i = 0; i < NTX_SCHED; ++i) in cudbg_collect_hw_sched()
1661 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_tp_indirect()
1663 int i, rc, n = 0; in cudbg_collect_tp_indirect()
1689 for (i = 0; i < n; i++) { in cudbg_collect_tp_indirect()
1694 tp_pio->ireg_addr = t5_tp_pio_array[i][0]; in cudbg_collect_tp_indirect()
1699 tp_pio->ireg_addr = t6_tp_pio_array[i][0]; in cudbg_collect_tp_indirect()
1715 for (i = 0; i < n; i++) { in cudbg_collect_tp_indirect()
1720 tp_pio->ireg_addr = t5_tp_tm_pio_array[i][0]; in cudbg_collect_tp_indirect()
1725 tp_pio->ireg_addr = t6_tp_tm_pio_array[i][0]; in cudbg_collect_tp_indirect()
1743 for (i = 0; i < n ; i++) { in cudbg_collect_tp_indirect()
1748 tp_pio->ireg_addr = t5_tp_mib_index_array[i][0]; in cudbg_collect_tp_indirect()
1755 tp_pio->ireg_addr = t6_tp_mib_index_array[i][0]; in cudbg_collect_tp_indirect()
1780 * Entries 0->7 are PF0->7, Entries 8->263 are VFID0->256. in cudbg_read_sge_qbase_indirect_reg()
1786 for (i = 0; i < SGE_QBASE_DATA_REG_NUM; i++, buff++) in cudbg_read_sge_qbase_indirect_reg()
1795 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_sge_indirect()
1798 u8 padap_running = 0; in cudbg_collect_sge_indirect()
1802 /* Accessing SGE_QBASE_MAP[0-3] and SGE_QBASE_INDEX regs can in cudbg_collect_sge_indirect()
1821 for (i = 0; i < 2; i++) { in cudbg_collect_sge_indirect()
1825 sge_pio->ireg_addr = t5_sge_dbg_index_array[i][0]; in cudbg_collect_sge_indirect()
1842 * SGE_QBASE_MAP[0-3] in cudbg_collect_sge_indirect()
1844 sge_qbase->reg_addr = t6_sge_qbase_index_array[0]; in cudbg_collect_sge_indirect()
1845 for (i = 0; i < SGE_QBASE_DATA_REG_NUM; i++) in cudbg_collect_sge_indirect()
1849 for (i = 0; i <= PCIE_FW_MASTER_M; i++) in cudbg_collect_sge_indirect()
1853 for (i = 0; i < padap->params.arch.vfcount; i++) in cudbg_collect_sge_indirect()
1868 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_ulprx_la()
1888 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_tp_la()
1908 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_meminfo()
1943 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_pif_la()
1965 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_clk_info()
2013 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pcie_indirect()
2026 for (i = 0; i < n; i++) { in cudbg_collect_pcie_indirect()
2030 pcie_pio->ireg_addr = t5_pcie_pdbg_array[i][0]; in cudbg_collect_pcie_indirect()
2045 for (i = 0; i < n; i++) { in cudbg_collect_pcie_indirect()
2049 pcie_pio->ireg_addr = t5_pcie_cdbg_array[i][0]; in cudbg_collect_pcie_indirect()
2069 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pm_indirect()
2082 for (i = 0; i < n; i++) { in cudbg_collect_pm_indirect()
2086 pm_pio->ireg_addr = t5_pm_rx_array[i][0]; in cudbg_collect_pm_indirect()
2101 for (i = 0; i < n; i++) { in cudbg_collect_pm_indirect()
2105 pm_pio->ireg_addr = t5_pm_tx_array[i][0]; in cudbg_collect_pm_indirect()
2126 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_tid()
2153 FW_PARAMS_PARAM_Y_V(0) | \ in cudbg_collect_tid()
2154 FW_PARAMS_PARAM_Z_V(0)) in cudbg_collect_tid()
2156 para[0] = FW_PARAM_PFVF_A(ETHOFLD_START); in cudbg_collect_tid()
2158 rc = t4_query_params(padap, padap->mbox, padap->pf, 0, 2, para, val); in cudbg_collect_tid()
2159 if (rc < 0) { in cudbg_collect_tid()
2164 tid->uotid_base = val[0]; in cudbg_collect_tid()
2165 tid->nuotids = val[1] - val[0] + 1; in cudbg_collect_tid()
2174 para[0] = FW_PARAM_PFVF_A(HPFILTER_START); in cudbg_collect_tid()
2176 rc = t4_query_params(padap, padap->mbox, padap->pf, 0, 2, in cudbg_collect_tid()
2178 if (rc < 0) { in cudbg_collect_tid()
2183 tid->hpftid_base = val[0]; in cudbg_collect_tid()
2184 tid->nhpftids = val[1] - val[0] + 1; in cudbg_collect_tid()
2217 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pcie_config()
2228 for (i = 0; i < n; i++) { in cudbg_collect_pcie_config()
2229 for (j = t5_pcie_config_array[i][0]; in cudbg_collect_pcie_config()
2240 int index, bit, bit_pos = 0; in cudbg_sge_ctxt_check_valid()
2274 found = 0; in cudbg_get_ctxt_region_info()
2275 memset(&mem_desc, 0, sizeof(struct cudbg_mem_desc)); in cudbg_get_ctxt_region_info()
2276 for (j = 0; j < ARRAY_SIZE(meminfo.avail); j++) { in cudbg_get_ctxt_region_info()
2314 return 0; in cudbg_get_ctxt_region_info()
2319 struct cudbg_region_info region_info[CTXT_CNM + 1] = { {0} }; in cudbg_dump_context_size()
2320 u8 mem_type[CTXT_INGRESS + 1] = { 0 }; in cudbg_dump_context_size()
2321 u32 i, size = 0; in cudbg_dump_context_size()
2329 for (i = 0; i < CTXT_CNM; i++) { in cudbg_dump_context_size()
2370 for (j = 0; j < max_qid; j++) { in cudbg_get_sge_ctxt_fw()
2394 struct cudbg_region_info region_info[CTXT_CNM + 1] = { {0} }; in cudbg_collect_dump_context()
2397 u8 mem_type[CTXT_INGRESS + 1] = { 0 }; in cudbg_collect_dump_context()
2398 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_dump_context()
2410 if (rc <= 0) in cudbg_collect_dump_context()
2467 for (j = 0; j < max_ctx_qid; j++) { in cudbg_collect_dump_context()
2477 for (k = 0; k < SGE_CTXT_SIZE / sizeof(u64); k++) in cudbg_collect_dump_context()
2545 int rc = 0; in cudbg_collect_tcam_index()
2549 * CtlCmdType - 0: Read, 1: Write in cudbg_collect_tcam_index()
2550 * CtlTcamSel - 0: TCAM0, 1: TCAM1 in cudbg_collect_tcam_index()
2551 * CtlXYBitSel- 0: Y bit, 1: X bit in cudbg_collect_tcam_index()
2555 ctl = CTLREQID_V(1) | CTLCMDTYPE_V(0) | CTLXYBITSEL_V(0); in cudbg_collect_tcam_index()
2557 ctl |= CTLTCAMINDEX_V(idx) | CTLTCAMSEL_V(0); in cudbg_collect_tcam_index()
2568 /* 0 - Outer header, 1 - Inner header in cudbg_collect_tcam_index()
2617 memset(&ldst_cmd, 0, sizeof(ldst_cmd)); in cudbg_collect_tcam_index()
2639 rc = 0; in cudbg_collect_tcam_index()
2644 tcam->rplc[0] = ntohl(mps_rplc.rplc31_0); in cudbg_collect_tcam_index()
2666 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_mps_tcam()
2667 u32 size = 0, i, n, total_size = 0; in cudbg_collect_mps_tcam()
2678 for (i = 0; i < n; i++) { in cudbg_collect_mps_tcam()
2703 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_vpd_data()
2706 struct vpd_params vpd = { 0 }; in cudbg_collect_vpd_data()
2723 vpd_str[CUDBG_VPD_VER_LEN] = '\0'; in cudbg_collect_vpd_data()
2724 rc = kstrtouint(vpd_str, 0, &vpd_vers); in cudbg_collect_vpd_data()
2754 /* Fill REQ_DATA regs with 0's */ in cudbg_read_tid()
2755 for (i = 0; i < NUM_LE_DB_DBGI_REQ_DATA_INSTANCES; i++) in cudbg_read_tid()
2756 t4_write_reg(padap, LE_DB_DBGI_REQ_DATA_A + (i << 2), 0); in cudbg_read_tid()
2784 for (i = 0; i < NUM_LE_DB_DBGI_RSP_DATA_INSTANCES; i++) in cudbg_read_tid()
2789 return 0; in cudbg_read_tid()
2817 int ipv6 = 0; in cudbg_is_ipv6_entry()
2822 return 0; in cudbg_is_ipv6_entry()
2825 ipv6 = tid_data->data[16] & 0x8000; in cudbg_is_ipv6_entry()
2827 ipv6 = tid_data->data[16] & 0x8000; in cudbg_is_ipv6_entry()
2829 ipv6 = tid_data->data[9] == 0x00C00000; in cudbg_is_ipv6_entry()
2831 ipv6 = 0; in cudbg_is_ipv6_entry()
2869 tcam_region->max_tid = (value & 0xFFFFF) + in cudbg_fill_le_tcam_info()
2895 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_le_tcam()
2896 struct cudbg_tcam tcam_region = { 0 }; in cudbg_collect_le_tcam()
2898 u32 bytes = 0; in cudbg_collect_le_tcam()
2914 for (i = 0; i < tcam_region.max_tid; ) { in cudbg_collect_le_tcam()
2950 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cctrl()
2968 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_ma_indirect()
2983 for (i = 0; i < n; i++) { in cudbg_collect_ma_indirect()
2987 ma_fli->ireg_addr = t6_ma_ireg_array[i][0]; in cudbg_collect_ma_indirect()
2998 for (i = 0; i < n; i++) { in cudbg_collect_ma_indirect()
3002 ma_fli->ireg_addr = t6_ma_ireg_array2[i][0]; in cudbg_collect_ma_indirect()
3005 for (j = 0; j < t6_ma_ireg_array2[i][3]; j++) { in cudbg_collect_ma_indirect()
3010 ma_fli->ireg_local_offset += 0x20; in cudbg_collect_ma_indirect()
3022 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_ulptx_la()
3042 for (i = 0; i < CUDBG_NUM_ULPTX; i++) { in cudbg_collect_ulptx_la()
3045 0x10 * i); in cudbg_collect_ulptx_la()
3048 0x10 * i); in cudbg_collect_ulptx_la()
3051 0x10 * i); in cudbg_collect_ulptx_la()
3052 for (j = 0; j < CUDBG_NUM_ULPTX_READ; j++) in cudbg_collect_ulptx_la()
3055 ULP_TX_LA_RDDATA_0_A + 0x10 * i); in cudbg_collect_ulptx_la()
3058 for (i = 0; i < CUDBG_NUM_ULPTX_ASIC_READ; i++) { in cudbg_collect_ulptx_la()
3059 t4_write_reg(padap, ULP_TX_ASIC_DEBUG_CTRL_A, 0x1); in cudbg_collect_ulptx_la()
3062 ulptx_la_buff->rddata_asic[i][0] = in cudbg_collect_ulptx_la()
3084 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_up_cim_indirect()
3088 u32 instance = 0; in cudbg_collect_up_cim_indirect()
3106 for (i = 0; i < n; i++) { in cudbg_collect_up_cim_indirect()
3111 up_cim_reg->ireg_addr = t5_up_cim_reg_array[i][0]; in cudbg_collect_up_cim_indirect()
3119 up_cim_reg->ireg_addr = t6_up_cim_reg_array[i][0]; in cudbg_collect_up_cim_indirect()
3131 local_offset = 0x120; in cudbg_collect_up_cim_indirect()
3136 local_offset = 0x10; in cudbg_collect_up_cim_indirect()
3141 local_offset = 0; in cudbg_collect_up_cim_indirect()
3146 for (j = 0; j < iter; j++, buff++) { in cudbg_collect_up_cim_indirect()
3165 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pbt_tables()
3179 for (i = 0; i < CUDBG_PBT_DYNAMIC_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3192 for (i = 0; i < CUDBG_PBT_STATIC_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3204 for (i = 0; i < CUDBG_LRF_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3216 for (i = 0; i < CUDBG_PBT_DATA_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3234 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_mbox_log()
3251 for (k = 0; k < mbox_cmds; k++) { in cudbg_collect_mbox_log()
3258 if (entry->timestamp == 0) in cudbg_collect_mbox_log()
3262 for (i = 0; i < MBOX_LEN / 8; i++) { in cudbg_collect_mbox_log()
3277 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_hma_indirect()
3292 for (i = 0; i < n; i++) { in cudbg_collect_hma_indirect()
3296 hma_fli->ireg_addr = t6_hma_ireg_array[i][0]; in cudbg_collect_hma_indirect()
3311 u32 tot_entries = 0, tot_size = 0; in cudbg_fill_qdesc_num_and_size()
3366 u32 num_queues = 0, tot_entries = 0, size = 0; in cudbg_collect_qdesc()
3368 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_qdesc()
3396 if (size <= 0) { \ in cudbg_collect_qdesc()
3405 } while (0) in cudbg_collect_qdesc()
3410 } while (0) in cudbg_collect_qdesc()
3415 } while (0) in cudbg_collect_qdesc()
3420 } while (0) in cudbg_collect_qdesc()
3423 for (i = 0; i < s->ethqsets; i++) in cudbg_collect_qdesc()
3427 for (i = 0; i < s->ethqsets; i++) in cudbg_collect_qdesc()
3431 for (i = 0; i < s->ethqsets; i++) in cudbg_collect_qdesc()
3435 for (i = 0; i < padap->params.nports; i++) in cudbg_collect_qdesc()
3454 for (j = 0; j < CXGB4_TX_MAX; j++) { in cudbg_collect_qdesc()
3459 for (i = 0; i < utxq->ntxq; i++) in cudbg_collect_qdesc()
3471 for (j = 0; j < CXGB4_ULD_MAX; j++) { in cudbg_collect_qdesc()
3476 for (i = 0; i < urxq->nrxq; i++) in cudbg_collect_qdesc()
3483 for (j = 0; j < CXGB4_ULD_MAX; j++) { in cudbg_collect_qdesc()
3488 for (i = 0; i < urxq->nrxq; i++) in cudbg_collect_qdesc()
3495 for (j = 0; j < CXGB4_ULD_MAX; j++) { in cudbg_collect_qdesc()
3501 for (i = 0; i < urxq->nciq; i++) in cudbg_collect_qdesc()
3515 for (i = 0; i < s->eoqsets; i++) in cudbg_collect_qdesc()
3521 for (i = 0; i < s->eoqsets; i++) in cudbg_collect_qdesc()
3525 for (i = 0; i < s->eoqsets; i++) in cudbg_collect_qdesc()
3536 cur_off = 0; in cudbg_collect_qdesc()
3580 struct cudbg_buffer temp_buff = {0}; in cudbg_collect_flash()
3586 for (i = 0; i < count; i += SF_PAGE_SIZE) { in cudbg_collect_flash()
3594 rc = t4_read_flash(padap, addr, n, (u32 *)temp_buff.data, 0); in cudbg_collect_flash()