Lines matching "rx" and "equalizer" in the iwlwifi PCIe gen3 context-info code (cleaned excerpts; elided code is marked /* ... */)

// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/*
 * Copyright (C) 2018-2024 Intel Corporation
 */
#include "iwl-trans.h"
#include "iwl-fh.h"
#include "iwl-context-info-gen3.h"
#include "iwl-prph.h"
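
/*
 * iwl_pcie_ctxt_info_dbg_enable() (excerpt): selects the DRAM buffer that
 * the firmware debug monitor (HWM) config in the PRPH scratch points at,
 * either the legacy fw_mon buffer or the first fragment of the ini-style
 * per-allocation-id monitor.
 */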
        struct iwl_dram_data *fw_mon = &trans->dbg.fw_mon;
        /* ... */
        if (fw_mon->size) {
                /* ... */
                dbg_cfg->hwm_base_addr = cpu_to_le64(fw_mon->physical);
                dbg_cfg->hwm_size = cpu_to_le32(fw_mon->size);
        }
        /* ... */
        fw_mon_cfg = &trans->dbg.fw_mon_cfg[alloc_id];

        switch (le32_to_cpu(fw_mon_cfg->buf_location)) {
        /* ... */
        case IWL_FW_INI_LOCATION_DRAM_PATH:
                if (trans->dbg.fw_mon_ini[alloc_id].num_frags) {
                        struct iwl_dram_data *frag =
                                &trans->dbg.fw_mon_ini[alloc_id].frags[0];
                        dbg_cfg->hwm_base_addr = cpu_to_le64(frag->physical);
                        dbg_cfg->hwm_size = cpu_to_le32(frag->size);
                        dbg_cfg->debug_token_config = cpu_to_le32(trans->dbg.ucode_preset);
                        IWL_DEBUG_FW(trans, /* format elided */
                                     dbg_cfg->debug_token_config);
                        IWL_DEBUG_FW(trans, /* format elided */
                                     trans->dbg.fw_mon_ini[alloc_id].num_frags);
                }
                break;
        default:
                IWL_ERR(trans, /* format elided */
                        le32_to_cpu(fw_mon_cfg->buf_location));
        }
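
/*
 * iwl_pcie_ctxt_info_gen3_init() (excerpt): allocates the PRPH scratch,
 * PRPH info and gen3 context-info DMA buffers, fills in the RX/TX queue
 * and debug configuration, and hands the addresses to the device so it
 * can boot the firmware image loader.
 */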
        int cmdq_size = max_t(u32, IWL_CMD_QUEUE_SIZE,
                              trans->cfg->min_txq_size);

        switch (trans_pcie->rx_buf_size) {
        case IWL_AMSDU_DEF:
                return -EINVAL;
        /* ... */
        }

        if (trans->dsbr_urm_fw_dependent)
                control_flags_ext |= IWL_PRPH_SCRATCH_EXT_URM_FW;
        if (trans->dsbr_urm_permanent)
                control_flags_ext |= IWL_PRPH_SCRATCH_EXT_URM_PERM;

        prph_scratch = dma_alloc_coherent(trans->dev, sizeof(*prph_scratch),
                                          &trans_pcie->prph_scratch_dma_addr,
                                          GFP_KERNEL);
        if (!prph_scratch)
                return -ENOMEM;

        prph_sc_ctrl = &prph_scratch->ctrl_cfg;
        prph_sc_ctrl->version.version = 0;
        prph_sc_ctrl->version.mac_id =
                cpu_to_le16((u16)trans->hw_rev);
        prph_sc_ctrl->version.size = cpu_to_le16(sizeof(*prph_scratch) / 4);
        /* ... */
        if (trans->trans_cfg->imr_enabled)
                control_flags |= IWL_PRPH_SCRATCH_IMR_DEBUG_EN;

        if (CSR_HW_REV_TYPE(trans->hw_rev) == IWL_CFG_MAC_TYPE_GL &&
            /* ... */) {
                /* ... */
        }

        /* initialize RX default queue */
        prph_sc_ctrl->rbd_cfg.free_rbd_addr =
                cpu_to_le64(trans_pcie->rxq->bd_dma);

        iwl_pcie_ctxt_info_dbg_enable(trans, &prph_sc_ctrl->hwm_cfg,
                                      &control_flags);
        prph_sc_ctrl->control.control_flags = cpu_to_le32(control_flags);
        prph_sc_ctrl->control.control_flags_ext = cpu_to_le32(control_flags_ext);

        /* initialize the Step equalizer data */
        prph_sc_ctrl->step_cfg.mbx_addr_0 = cpu_to_le32(trans->mbx_addr_0_step);
        prph_sc_ctrl->step_cfg.mbx_addr_1 = cpu_to_le32(trans->mbx_addr_1_step);

        ret = iwl_pcie_init_fw_sec(trans, fw, &prph_scratch->dram);
        if (ret)
                goto err_free_prph_scratch;
        /*
         * ... dummy TR/CR tail pointers - which shouldn't be necessary as we
         * don't ... (rest of comment elided)
         */
        prph_info = dma_alloc_coherent(trans->dev, PAGE_SIZE,
                                       &trans_pcie->prph_info_dma_addr,
                                       GFP_KERNEL);
        if (!prph_info) {
                ret = -ENOMEM;
                goto err_free_prph_scratch;
        }

        ctxt_info_gen3 = dma_alloc_coherent(trans->dev,
                                            sizeof(*ctxt_info_gen3),
                                            &trans_pcie->ctxt_info_dma_addr,
                                            GFP_KERNEL);
        if (!ctxt_info_gen3) {
                ret = -ENOMEM;
                goto err_free_prph_info;
        }

        ctxt_info_gen3->prph_info_base_addr =
                cpu_to_le64(trans_pcie->prph_info_dma_addr);
        ctxt_info_gen3->prph_scratch_base_addr =
                cpu_to_le64(trans_pcie->prph_scratch_dma_addr);
        ctxt_info_gen3->prph_scratch_size =
                cpu_to_le32(sizeof(*prph_scratch));
        ctxt_info_gen3->cr_head_idx_arr_base_addr =
                cpu_to_le64(trans_pcie->rxq->rb_stts_dma);
        ctxt_info_gen3->tr_tail_idx_arr_base_addr =
                cpu_to_le64(trans_pcie->prph_info_dma_addr + PAGE_SIZE / 2);
        ctxt_info_gen3->cr_tail_idx_arr_base_addr =
                cpu_to_le64(trans_pcie->prph_info_dma_addr + 3 * PAGE_SIZE / 4);
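        /*
         * Note: the TR/CR tail index arrays are carved out of the PRPH info
         * page allocated above (at offsets PAGE_SIZE/2 and 3*PAGE_SIZE/4),
         * while the CR head indices point at the RX queue's status
         * write-back buffer (rb_stts_dma).
         */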
        ctxt_info_gen3->mtr_base_addr =
                cpu_to_le64(trans_pcie->txqs.txq[trans_pcie->txqs.cmd.q_id]->dma_addr);
        ctxt_info_gen3->mcr_base_addr =
                cpu_to_le64(trans_pcie->rxq->used_bd_dma);
        ctxt_info_gen3->mtr_size =
                cpu_to_le16(TFD_QUEUE_CB_SIZE(cmdq_size));
        ctxt_info_gen3->mcr_size =
                cpu_to_le16(RX_QUEUE_CB_SIZE(trans->cfg->num_rbds));

        trans_pcie->ctxt_info_gen3 = ctxt_info_gen3;
        trans_pcie->prph_info = prph_info;
        trans_pcie->prph_scratch = prph_scratch;
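        /*
         * Copy the IML (image loader) into a DMA-coherent buffer and point
         * the device at the context info; the CSR writes below kick off the
         * firmware self-load.
         */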
        trans_pcie->iml = dma_alloc_coherent(trans->dev, trans->iml_len,
                                             &trans_pcie->iml_dma_addr,
                                             GFP_KERNEL);
        if (!trans_pcie->iml) {
                ret = -ENOMEM;
                goto err_free_ctxt_info;
        }

        memcpy(trans_pcie->iml, trans->iml, trans->iml_len);
        /* ... */
        iwl_write64(trans, CSR_CTXT_INFO_ADDR,
                    trans_pcie->ctxt_info_dma_addr);
        iwl_write64(trans, CSR_IML_DATA_ADDR,
                    trans_pcie->iml_dma_addr);
        iwl_write32(trans, CSR_IML_SIZE_ADDR, trans->iml_len);
        /* ... */
        return 0;

err_free_ctxt_info:
        dma_free_coherent(trans->dev, sizeof(*trans_pcie->ctxt_info_gen3),
                          trans_pcie->ctxt_info_gen3,
                          trans_pcie->ctxt_info_dma_addr);
        trans_pcie->ctxt_info_gen3 = NULL;
err_free_prph_info:
        dma_free_coherent(trans->dev, PAGE_SIZE, prph_info,
                          trans_pcie->prph_info_dma_addr);
err_free_prph_scratch:
        dma_free_coherent(trans->dev, sizeof(*prph_scratch), prph_scratch,
                          trans_pcie->prph_scratch_dma_addr);
        return ret;
}
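
/*
 * iwl_pcie_ctxt_info_gen3_free() (excerpt): frees the IML, context info,
 * PRPH scratch and PRPH info DMA buffers, clearing the cached pointers and
 * DMA addresses as it goes.
 */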
        if (trans_pcie->iml) {
                dma_free_coherent(trans->dev, trans->iml_len, trans_pcie->iml,
                                  trans_pcie->iml_dma_addr);
                trans_pcie->iml_dma_addr = 0;
                trans_pcie->iml = NULL;
        }
        /* ... */
        if (!trans_pcie->ctxt_info_gen3)
                return;
        /* ... */
        dma_free_coherent(trans->dev, sizeof(*trans_pcie->ctxt_info_gen3),
                          trans_pcie->ctxt_info_gen3,
                          trans_pcie->ctxt_info_dma_addr);
        trans_pcie->ctxt_info_dma_addr = 0;
        trans_pcie->ctxt_info_gen3 = NULL;

        dma_free_coherent(trans->dev, sizeof(*trans_pcie->prph_scratch),
                          trans_pcie->prph_scratch,
                          trans_pcie->prph_scratch_dma_addr);
        trans_pcie->prph_scratch_dma_addr = 0;
        trans_pcie->prph_scratch = NULL;

        dma_free_coherent(trans->dev, PAGE_SIZE, trans_pcie->prph_info,
                          trans_pcie->prph_info_dma_addr);
        trans_pcie->prph_info_dma_addr = 0;
        trans_pcie->prph_info = NULL;
}
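
/*
 * iwl_pcie_load_payloads_continuously() (excerpt): the "continuous" PNVM
 * layout expects exactly two payload chunks and copies them back to back
 * into a single DMA block, guarding against u32 overflow of the combined
 * length.
 */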
        if (pnvm_data->n_chunks != UNFRAGMENTED_PNVM_PAYLOADS_NUMBER) {
                IWL_DEBUG_FW(trans, /* format elided */
                             pnvm_data->n_chunks);
                return -EINVAL;
        }

        len0 = pnvm_data->chunks[0].len;
        len1 = pnvm_data->chunks[1].len;
        if (len1 > 0xFFFFFFFF - len0) {
                /* ... */
                return -EINVAL;
        }
        len = len0 + len1;

        dram->block = iwl_pcie_ctxt_info_dma_alloc_coherent(trans, len,
                                                            &dram->physical);
        if (!dram->block) {
                /* ... */
                return -ENOMEM;
        }

        dram->size = len;
        memcpy(dram->block, pnvm_data->chunks[0].data, len0);
        memcpy((u8 *)dram->block + len0, pnvm_data->chunks[1].data, len1);

        return 0;
}
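
/*
 * iwl_pcie_load_payloads_segments() (excerpt): the fragmented layout gives
 * each payload chunk its own DMA region and records the per-region physical
 * addresses in a separate descriptor array that the PRPH scratch points at.
 */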
        struct iwl_dram_data *cur_payload_dram = &dram_regions->drams[0];
        struct iwl_dram_data *desc_dram = &dram_regions->prph_scratch_mem_desc;
        /* ... */
        desc_dram->block = iwl_pcie_ctxt_info_dma_alloc_coherent
                                                (trans, len,
                                                 &desc_dram->physical);
        if (!desc_dram->block) {
                /* ... */
                return -ENOMEM;
        }
        desc_dram->size = len;
        memset(desc_dram->block, 0, len);

        dram_regions->n_regions = 0;
        for (i = 0; i < pnvm_data->n_chunks; i++) {
                len = pnvm_data->chunks[i].len;
                data = pnvm_data->chunks[i].data;
                if (iwl_pcie_ctxt_info_alloc_dma(trans, data, len,
                                                 cur_payload_dram)) {
                        iwl_trans_pcie_free_pnvm_dram_regions(dram_regions,
                                                              trans->dev);
                        return -ENOMEM;
                }
                dram_regions->n_regions++;
                cur_payload_dram++;
        }

        addresses = desc_dram->block;
        for (i = 0; i < pnvm_data->n_chunks; i++) {
                addresses->mem_descs[i] =
                        cpu_to_le64(dram_regions->drams[i].physical);
        }

        return 0;
}
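
/*
 * iwl_trans_pcie_ctx_info_gen3_load_pnvm() (excerpt): loads the PNVM image
 * into DRAM once per device lifetime; it appears to pick the segmented path
 * when the firmware advertises fragmented-PNVM support and the two-chunk
 * continuous path otherwise.
 */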
        struct iwl_prph_scratch_ctrl_cfg *prph_sc_ctrl =
                &trans_pcie->prph_scratch->ctrl_cfg;
        struct iwl_dram_regions *dram_regions = &trans_pcie->pnvm_data;
        if (trans->pnvm_loaded)
                return 0;
        if (WARN_ON(prph_sc_ctrl->pnvm_cfg.pnvm_size))
                return -EBUSY;
        if (trans->trans_cfg->device_family < IWL_DEVICE_FAMILY_AX210)
                return 0;
        if (!pnvm_payloads->n_chunks) {
                /* ... */
                return -EINVAL;
        }
        if (fw_has_capa(capa, IWL_UCODE_TLV_CAPA_FRAGMENTED_PNVM_IMG)) {
                ret = iwl_pcie_load_payloads_segments(trans, dram_regions, pnvm_payloads);
                if (!ret)
                        trans->pnvm_loaded = true;
        } else {
                ret = iwl_pcie_load_payloads_continuously(trans, pnvm_payloads,
                                                          &dram_regions->drams[0]);
                if (!ret) {
                        dram_regions->n_regions = 1;
                        trans->pnvm_loaded = true;
                }
        }
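
/*
 * iwl_dram_regions_size() (excerpt): the total payload size is the sum of
 * the individual region sizes.
 */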
        for (i = 0; i < dram_regions->n_regions; i++)
                total_size += dram_regions->drams[i].size;
        return total_size;
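
/*
 * iwl_pcie_set_pnvm_segments() (excerpt): for the segmented layout the
 * PRPH scratch PNVM config points at the descriptor array, while the size
 * covers all payload regions combined.
 */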
        struct iwl_prph_scratch_ctrl_cfg *prph_sc_ctrl =
                &trans_pcie->prph_scratch->ctrl_cfg;
        struct iwl_dram_regions *dram_regions = &trans_pcie->pnvm_data;

        prph_sc_ctrl->pnvm_cfg.pnvm_base_addr =
                cpu_to_le64(dram_regions->prph_scratch_mem_desc.physical);
        prph_sc_ctrl->pnvm_cfg.pnvm_size =
                cpu_to_le32(iwl_dram_regions_size(dram_regions));
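
/*
 * iwl_pcie_set_continuous_pnvm() (excerpt): the continuous layout points
 * the PNVM config straight at the single DRAM block.
 */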
        struct iwl_prph_scratch_ctrl_cfg *prph_sc_ctrl =
                &trans_pcie->prph_scratch->ctrl_cfg;

        prph_sc_ctrl->pnvm_cfg.pnvm_base_addr =
                cpu_to_le64(trans_pcie->pnvm_data.drams[0].physical);
        prph_sc_ctrl->pnvm_cfg.pnvm_size =
                cpu_to_le32(trans_pcie->pnvm_data.drams[0].size);
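
/*
 * iwl_trans_pcie_ctx_info_gen3_set_pnvm() (excerpt): pre-AX210 devices
 * have no PNVM section in the PRPH scratch, so this is a no-op there;
 * otherwise it applies one of the two setters above.
 */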
        if (trans->trans_cfg->device_family < IWL_DEVICE_FAMILY_AX210)
                return 0;
        /* ... */
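
/*
 * iwl_trans_pcie_ctx_info_gen3_load_reduce_power() (excerpt): mirrors the
 * PNVM loader for the reduce-power tables, with the same one-shot guard
 * and segmented/continuous split.
 */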
        struct iwl_prph_scratch_ctrl_cfg *prph_sc_ctrl =
                &trans_pcie->prph_scratch->ctrl_cfg;
        struct iwl_dram_regions *dram_regions = &trans_pcie->reduced_tables_data;
        if (trans->reduce_power_loaded)
                return 0;
        if (trans->trans_cfg->device_family < IWL_DEVICE_FAMILY_AX210)
                return 0;
        if (WARN_ON(prph_sc_ctrl->reduce_power_cfg.size))
                return -EBUSY;
        if (!payloads->n_chunks) {
                /* ... */
                return -EINVAL;
        }
        if (fw_has_capa(capa, IWL_UCODE_TLV_CAPA_FRAGMENTED_PNVM_IMG)) {
                ret = iwl_pcie_load_payloads_segments(trans, dram_regions, payloads);
                if (!ret)
                        trans->reduce_power_loaded = true;
        } else {
                ret = iwl_pcie_load_payloads_continuously(trans, payloads,
                                                          &dram_regions->drams[0]);
                if (!ret) {
                        dram_regions->n_regions = 1;
                        trans->reduce_power_loaded = true;
                }
        }
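
/*
 * iwl_pcie_set_reduce_power_segments() (excerpt): segmented variant of the
 * reduce-power config, pointing at the descriptor array.
 */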
        struct iwl_prph_scratch_ctrl_cfg *prph_sc_ctrl =
                &trans_pcie->prph_scratch->ctrl_cfg;
        struct iwl_dram_regions *dram_regions = &trans_pcie->reduced_tables_data;

        prph_sc_ctrl->reduce_power_cfg.base_addr =
                cpu_to_le64(dram_regions->prph_scratch_mem_desc.physical);
        prph_sc_ctrl->reduce_power_cfg.size =
                cpu_to_le32(iwl_dram_regions_size(dram_regions));
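
/*
 * iwl_pcie_set_continuous_reduce_power() (excerpt): continuous variant,
 * pointing at the single DRAM block.
 */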
        struct iwl_prph_scratch_ctrl_cfg *prph_sc_ctrl =
                &trans_pcie->prph_scratch->ctrl_cfg;

        prph_sc_ctrl->reduce_power_cfg.base_addr =
                cpu_to_le64(trans_pcie->reduced_tables_data.drams[0].physical);
        prph_sc_ctrl->reduce_power_cfg.size =
                cpu_to_le32(trans_pcie->reduced_tables_data.drams[0].size);
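
/*
 * iwl_trans_pcie_ctx_info_gen3_set_reduce_power() (excerpt): same pre-AX210
 * no-op guard as the PNVM setter above.
 */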
        if (trans->trans_cfg->device_family < IWL_DEVICE_FAMILY_AX210)
                return 0;
        /* ... */