Lines matching full:tx_chn (identifier hits from the Linux kernel TI K3 UDMA glue layer, drivers/dma/ti/k3-udma-glue.c)
115 bool tx_chn) in of_k3_udma_glue_parse_chn_common() argument
117 if (tx_chn && !(thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) in of_k3_udma_glue_parse_chn_common()
120 if (!tx_chn && (thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) in of_k3_udma_glue_parse_chn_common()
135 if (tx_chn) in of_k3_udma_glue_parse_chn_common()
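The hits above implement the direction check in the common parse helper: a PSI-L thread ID carries the K3_PSIL_DST_THREAD_ID_OFFSET flag (0x8000, from include/linux/dma/k3-psil.h) when it names a destination thread, and the same flag later decides whether the thread is stored as common->dst_thread (TX) or common->src_thread (RX). A minimal sketch of the validation, assuming that flag value:

    /* TX channels must point at a destination PSI-L thread; RX channels
     * must be fed from a source thread (flag bit clear).
     */
    if (tx_chn && !(thread_id & K3_PSIL_DST_THREAD_ID_OFFSET))
            return -EINVAL;
    if (!tx_chn && (thread_id & K3_PSIL_DST_THREAD_ID_OFFSET))
            return -EINVAL;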
145 bool tx_chn) in of_k3_udma_glue_parse_chn() argument
185 ret = of_k3_udma_glue_parse_chn_common(common, thread_id, tx_chn); in of_k3_udma_glue_parse_chn()
194 bool tx_chn, u32 thread_id) in of_k3_udma_glue_parse_chn_by_id() argument
205 ret = of_k3_udma_glue_parse_chn_common(common, thread_id, tx_chn); in of_k3_udma_glue_parse_chn_by_id()
209 static void k3_udma_glue_dump_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_dump_tx_chn() argument
211 struct device *dev = tx_chn->common.dev; in k3_udma_glue_dump_tx_chn()
217 tx_chn->udma_tchan_id, in k3_udma_glue_dump_tx_chn()
218 tx_chn->common.src_thread, in k3_udma_glue_dump_tx_chn()
219 tx_chn->common.dst_thread); in k3_udma_glue_dump_tx_chn()
241 static int k3_udma_glue_cfg_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_cfg_tx_chn() argument
243 const struct udma_tisci_rm *tisci_rm = tx_chn->common.tisci_rm; in k3_udma_glue_cfg_tx_chn()
257 req.index = tx_chn->udma_tchan_id; in k3_udma_glue_cfg_tx_chn()
258 if (tx_chn->tx_pause_on_err) in k3_udma_glue_cfg_tx_chn()
260 if (tx_chn->tx_filt_einfo) in k3_udma_glue_cfg_tx_chn()
262 if (tx_chn->tx_filt_pswords) in k3_udma_glue_cfg_tx_chn()
265 if (tx_chn->tx_supr_tdpkt) in k3_udma_glue_cfg_tx_chn()
267 req.tx_fetch_size = tx_chn->common.hdesc_size >> 2; in k3_udma_glue_cfg_tx_chn()
268 req.txcq_qnum = k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_cfg_tx_chn()
269 req.tx_atype = tx_chn->common.atype_asel; in k3_udma_glue_cfg_tx_chn()
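k3_udma_glue_cfg_tx_chn() hands the channel parameters to System Firmware through the TI-SCI resource-management ops. A condensed, hedged sketch of how the request seen in the hits is assembled; field and flag names are from include/linux/soc/ti/ti_sci_protocol.h, and the valid_params set shown here is abridged, not the driver's full list:

    struct ti_sci_msg_rm_udmap_tx_ch_cfg req = { };

    req.valid_params = TI_SCI_MSG_VALUE_RM_UDMAP_CH_PAUSE_ON_ERR_VALID |
                       TI_SCI_MSG_VALUE_RM_UDMAP_CH_TX_FETCH_SIZE_VALID |
                       TI_SCI_MSG_VALUE_RM_UDMAP_CH_CQ_QNUM_VALID;
    req.nav_id = tisci_rm->tisci_dev_id;
    req.index = tx_chn->udma_tchan_id;
    if (tx_chn->tx_pause_on_err)
            req.tx_pause_on_err = 1;
    /* fetch size is counted in 32-bit words, hence the >> 2 in the hit */
    req.tx_fetch_size = tx_chn->common.hdesc_size >> 2;
    req.txcq_qnum = k3_ringacc_get_ring_id(tx_chn->ringtxcq);
    req.tx_atype = tx_chn->common.atype_asel;

    return tisci_rm->tisci_udmap_ops->tx_ch_cfg(tisci_rm->tisci, &req);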
276 struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_request_tx_chn_common() argument
281 tx_chn->common.hdesc_size = cppi5_hdesc_calc_size(tx_chn->common.epib, in k3_udma_glue_request_tx_chn_common()
282 tx_chn->common.psdata_size, in k3_udma_glue_request_tx_chn_common()
283 tx_chn->common.swdata_size); in k3_udma_glue_request_tx_chn_common()
285 if (xudma_is_pktdma(tx_chn->common.udmax)) in k3_udma_glue_request_tx_chn_common()
286 tx_chn->udma_tchan_id = tx_chn->common.ep_config->mapped_channel_id; in k3_udma_glue_request_tx_chn_common()
288 tx_chn->udma_tchan_id = -1; in k3_udma_glue_request_tx_chn_common()
291 tx_chn->udma_tchanx = xudma_tchan_get(tx_chn->common.udmax, in k3_udma_glue_request_tx_chn_common()
292 tx_chn->udma_tchan_id); in k3_udma_glue_request_tx_chn_common()
293 if (IS_ERR(tx_chn->udma_tchanx)) { in k3_udma_glue_request_tx_chn_common()
294 ret = PTR_ERR(tx_chn->udma_tchanx); in k3_udma_glue_request_tx_chn_common()
298 tx_chn->udma_tchan_id = xudma_tchan_get_id(tx_chn->udma_tchanx); in k3_udma_glue_request_tx_chn_common()
300 tx_chn->common.chan_dev.class = &k3_udma_glue_devclass; in k3_udma_glue_request_tx_chn_common()
301 tx_chn->common.chan_dev.parent = xudma_get_device(tx_chn->common.udmax); in k3_udma_glue_request_tx_chn_common()
302 dev_set_name(&tx_chn->common.chan_dev, "tchan%d-0x%04x", in k3_udma_glue_request_tx_chn_common()
303 tx_chn->udma_tchan_id, tx_chn->common.dst_thread); in k3_udma_glue_request_tx_chn_common()
304 ret = device_register(&tx_chn->common.chan_dev); in k3_udma_glue_request_tx_chn_common()
307 put_device(&tx_chn->common.chan_dev); in k3_udma_glue_request_tx_chn_common()
308 tx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_request_tx_chn_common()
312 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_request_tx_chn_common()
314 tx_chn->common.chan_dev.dma_coherent = true; in k3_udma_glue_request_tx_chn_common()
315 dma_coerce_mask_and_coherent(&tx_chn->common.chan_dev, in k3_udma_glue_request_tx_chn_common()
319 atomic_set(&tx_chn->free_pkts, cfg->txcq_cfg.size); in k3_udma_glue_request_tx_chn_common()
321 if (xudma_is_pktdma(tx_chn->common.udmax)) in k3_udma_glue_request_tx_chn_common()
322 tx_chn->udma_tflow_id = tx_chn->common.ep_config->default_flow_id; in k3_udma_glue_request_tx_chn_common()
324 tx_chn->udma_tflow_id = tx_chn->udma_tchan_id; in k3_udma_glue_request_tx_chn_common()
327 ret = k3_ringacc_request_rings_pair(tx_chn->common.ringacc, in k3_udma_glue_request_tx_chn_common()
328 tx_chn->udma_tflow_id, -1, in k3_udma_glue_request_tx_chn_common()
329 &tx_chn->ringtx, in k3_udma_glue_request_tx_chn_common()
330 &tx_chn->ringtxcq); in k3_udma_glue_request_tx_chn_common()
337 cfg->tx_cfg.dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn); in k3_udma_glue_request_tx_chn_common()
341 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_request_tx_chn_common()
342 cfg->tx_cfg.asel = tx_chn->common.atype_asel; in k3_udma_glue_request_tx_chn_common()
343 cfg->txcq_cfg.asel = tx_chn->common.atype_asel; in k3_udma_glue_request_tx_chn_common()
346 ret = k3_ringacc_ring_cfg(tx_chn->ringtx, &cfg->tx_cfg); in k3_udma_glue_request_tx_chn_common()
352 ret = k3_ringacc_ring_cfg(tx_chn->ringtxcq, &cfg->txcq_cfg); in k3_udma_glue_request_tx_chn_common()
359 tx_chn->common.src_thread = in k3_udma_glue_request_tx_chn_common()
360 xudma_dev_get_psil_base(tx_chn->common.udmax) + in k3_udma_glue_request_tx_chn_common()
361 tx_chn->udma_tchan_id; in k3_udma_glue_request_tx_chn_common()
363 ret = k3_udma_glue_cfg_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn_common()
369 k3_udma_glue_dump_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn_common()
378 struct k3_udma_glue_tx_channel *tx_chn; in k3_udma_glue_request_tx_chn() local
381 tx_chn = devm_kzalloc(dev, sizeof(*tx_chn), GFP_KERNEL); in k3_udma_glue_request_tx_chn()
382 if (!tx_chn) in k3_udma_glue_request_tx_chn()
385 tx_chn->common.dev = dev; in k3_udma_glue_request_tx_chn()
386 tx_chn->common.swdata_size = cfg->swdata_size; in k3_udma_glue_request_tx_chn()
387 tx_chn->tx_pause_on_err = cfg->tx_pause_on_err; in k3_udma_glue_request_tx_chn()
388 tx_chn->tx_filt_einfo = cfg->tx_filt_einfo; in k3_udma_glue_request_tx_chn()
389 tx_chn->tx_filt_pswords = cfg->tx_filt_pswords; in k3_udma_glue_request_tx_chn()
390 tx_chn->tx_supr_tdpkt = cfg->tx_supr_tdpkt; in k3_udma_glue_request_tx_chn()
394 &tx_chn->common, true); in k3_udma_glue_request_tx_chn()
398 ret = k3_udma_glue_request_tx_chn_common(dev, tx_chn, cfg); in k3_udma_glue_request_tx_chn()
402 return tx_chn; in k3_udma_glue_request_tx_chn()
405 k3_udma_glue_release_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn()
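This is the entry point consumers such as am65-cpsw use to claim a TX channel by name. A hedged usage sketch; the channel name "tx0", the ring depth, and the swdata layout are driver-specific assumptions, not values from this file:

    struct k3_udma_glue_tx_channel_cfg cfg = { };
    struct k3_udma_glue_tx_channel *tx_chn;

    cfg.swdata_size = sizeof(void *);          /* per-descriptor driver data */
    cfg.tx_cfg.mode = K3_RINGACC_RING_MODE_RING;
    cfg.tx_cfg.elm_size = K3_RINGACC_RING_ELSIZE_8;
    cfg.tx_cfg.size = 128;                     /* submit-ring depth */
    cfg.txcq_cfg = cfg.tx_cfg;                 /* completion ring mirrors it */

    tx_chn = k3_udma_glue_request_tx_chn(dev, "tx0", &cfg);
    if (IS_ERR(tx_chn))
            return PTR_ERR(tx_chn);

Note that per the hit at source line 319, the push budget (free_pkts) is seeded from txcq_cfg.size, so the completion-ring depth caps how many packets may be in flight.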
415 struct k3_udma_glue_tx_channel *tx_chn; in k3_udma_glue_request_tx_chn_for_thread_id() local
418 tx_chn = devm_kzalloc(dev, sizeof(*tx_chn), GFP_KERNEL); in k3_udma_glue_request_tx_chn_for_thread_id()
419 if (!tx_chn) in k3_udma_glue_request_tx_chn_for_thread_id()
422 tx_chn->common.dev = dev; in k3_udma_glue_request_tx_chn_for_thread_id()
423 tx_chn->common.swdata_size = cfg->swdata_size; in k3_udma_glue_request_tx_chn_for_thread_id()
424 tx_chn->tx_pause_on_err = cfg->tx_pause_on_err; in k3_udma_glue_request_tx_chn_for_thread_id()
425 tx_chn->tx_filt_einfo = cfg->tx_filt_einfo; in k3_udma_glue_request_tx_chn_for_thread_id()
426 tx_chn->tx_filt_pswords = cfg->tx_filt_pswords; in k3_udma_glue_request_tx_chn_for_thread_id()
427 tx_chn->tx_supr_tdpkt = cfg->tx_supr_tdpkt; in k3_udma_glue_request_tx_chn_for_thread_id()
429 ret = of_k3_udma_glue_parse_chn_by_id(udmax_np, &tx_chn->common, true, thread_id); in k3_udma_glue_request_tx_chn_for_thread_id()
433 ret = k3_udma_glue_request_tx_chn_common(dev, tx_chn, cfg); in k3_udma_glue_request_tx_chn_for_thread_id()
437 return tx_chn; in k3_udma_glue_request_tx_chn_for_thread_id()
440 k3_udma_glue_release_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn_for_thread_id()
445 void k3_udma_glue_release_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_release_tx_chn() argument
447 if (tx_chn->psil_paired) { in k3_udma_glue_release_tx_chn()
448 xudma_navss_psil_unpair(tx_chn->common.udmax, in k3_udma_glue_release_tx_chn()
449 tx_chn->common.src_thread, in k3_udma_glue_release_tx_chn()
450 tx_chn->common.dst_thread); in k3_udma_glue_release_tx_chn()
451 tx_chn->psil_paired = false; in k3_udma_glue_release_tx_chn()
454 if (!IS_ERR_OR_NULL(tx_chn->udma_tchanx)) in k3_udma_glue_release_tx_chn()
455 xudma_tchan_put(tx_chn->common.udmax, in k3_udma_glue_release_tx_chn()
456 tx_chn->udma_tchanx); in k3_udma_glue_release_tx_chn()
458 if (tx_chn->ringtxcq) in k3_udma_glue_release_tx_chn()
459 k3_ringacc_ring_free(tx_chn->ringtxcq); in k3_udma_glue_release_tx_chn()
461 if (tx_chn->ringtx) in k3_udma_glue_release_tx_chn()
462 k3_ringacc_ring_free(tx_chn->ringtx); in k3_udma_glue_release_tx_chn()
464 if (tx_chn->common.chan_dev.parent) { in k3_udma_glue_release_tx_chn()
465 device_unregister(&tx_chn->common.chan_dev); in k3_udma_glue_release_tx_chn()
466 tx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_release_tx_chn()
471 int k3_udma_glue_push_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_push_tx_chn() argument
477 if (!atomic_add_unless(&tx_chn->free_pkts, -1, 0)) in k3_udma_glue_push_tx_chn()
480 ringtxcq_id = k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_push_tx_chn()
483 return k3_ringacc_ring_push(tx_chn->ringtx, &desc_dma); in k3_udma_glue_push_tx_chn()
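The push path first takes one unit of the free_pkts budget (the atomic_add_unless hit), stamps the descriptor's return queue with the txcq ring ID, and pushes the descriptor's DMA address onto the submit ring. A hedged caller sketch; hdesc/desc_dma are assumed to come from a pool sized by k3_udma_glue_tx_get_hdesc_size(), and buf_dma/pkt_len are placeholders:

    cppi5_hdesc_init(hdesc, 0, 0);             /* no EPIB/psdata in this sketch */
    cppi5_hdesc_set_pktlen(hdesc, pkt_len);
    cppi5_hdesc_attach_buf(hdesc, buf_dma, pkt_len, buf_dma, pkt_len);

    ret = k3_udma_glue_push_tx_chn(tx_chn, hdesc, desc_dma);
    if (ret)        /* -ENOMEM once the free_pkts budget is exhausted */
            goto drop;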
487 int k3_udma_glue_pop_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_pop_tx_chn() argument
492 ret = k3_ringacc_ring_pop(tx_chn->ringtxcq, desc_dma); in k3_udma_glue_pop_tx_chn()
494 atomic_inc(&tx_chn->free_pkts); in k3_udma_glue_pop_tx_chn()
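Completions are reaped from the txcq ring, and each successful pop returns one unit of free_pkts budget (the atomic_inc hit). A hedged completion-loop sketch, e.g. from NAPI poll; tx_complete_one() is a hypothetical driver helper:

    dma_addr_t desc_dma;

    while (k3_udma_glue_pop_tx_chn(tx_chn, &desc_dma) == 0) {
            /* map desc_dma back to the CPU-side descriptor, unmap and
             * free the attached buffer, recycle the descriptor
             */
            tx_complete_one(priv, desc_dma);   /* hypothetical helper */
    }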
500 int k3_udma_glue_enable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_enable_tx_chn() argument
504 ret = xudma_navss_psil_pair(tx_chn->common.udmax, in k3_udma_glue_enable_tx_chn()
505 tx_chn->common.src_thread, in k3_udma_glue_enable_tx_chn()
506 tx_chn->common.dst_thread); in k3_udma_glue_enable_tx_chn()
508 dev_err(tx_chn->common.dev, "PSI-L request err %d\n", ret); in k3_udma_glue_enable_tx_chn()
512 tx_chn->psil_paired = true; in k3_udma_glue_enable_tx_chn()
514 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_PEER_RT_EN_REG, in k3_udma_glue_enable_tx_chn()
517 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, in k3_udma_glue_enable_tx_chn()
520 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn en"); in k3_udma_glue_enable_tx_chn()
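Enable first pairs the PSI-L source and destination threads (the link that actually connects the peripheral to the channel) and only then sets the peer and local run-time enable bits; psil_paired records the pairing so that disable/release unpair exactly once. Consumer side this reduces to, as a sketch:

    ret = k3_udma_glue_enable_tx_chn(tx_chn);
    if (ret)
            return ret;     /* PSI-L pairing failed, channel stays disabled */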
525 void k3_udma_glue_disable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_disable_tx_chn() argument
527 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis1"); in k3_udma_glue_disable_tx_chn()
529 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, 0); in k3_udma_glue_disable_tx_chn()
531 xudma_tchanrt_write(tx_chn->udma_tchanx, in k3_udma_glue_disable_tx_chn()
533 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis2"); in k3_udma_glue_disable_tx_chn()
535 if (tx_chn->psil_paired) { in k3_udma_glue_disable_tx_chn()
536 xudma_navss_psil_unpair(tx_chn->common.udmax, in k3_udma_glue_disable_tx_chn()
537 tx_chn->common.src_thread, in k3_udma_glue_disable_tx_chn()
538 tx_chn->common.dst_thread); in k3_udma_glue_disable_tx_chn()
539 tx_chn->psil_paired = false; in k3_udma_glue_disable_tx_chn()
544 void k3_udma_glue_tdown_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tdown_tx_chn() argument
550 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown1"); in k3_udma_glue_tdown_tx_chn()
552 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, in k3_udma_glue_tdown_tx_chn()
555 val = xudma_tchanrt_read(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG); in k3_udma_glue_tdown_tx_chn()
558 val = xudma_tchanrt_read(tx_chn->udma_tchanx, in k3_udma_glue_tdown_tx_chn()
562 dev_err(tx_chn->common.dev, "TX tdown timeout\n"); in k3_udma_glue_tdown_tx_chn()
568 val = xudma_tchanrt_read(tx_chn->udma_tchanx, in k3_udma_glue_tdown_tx_chn()
571 dev_err(tx_chn->common.dev, "TX tdown peer not stopped\n"); in k3_udma_glue_tdown_tx_chn()
572 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown2"); in k3_udma_glue_tdown_tx_chn()
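Teardown is a drain handshake: the TDOWN bit is set alongside EN, the local RT_CTL register is polled until EN clears, and the peer is then checked for having stopped. The usual consumer shutdown order is sketched below, hedged; tx_cleanup is the hypothetical callback shown under the reset hits that follow:

    k3_udma_glue_tdown_tx_chn(tx_chn, false);            /* async teardown */
    k3_udma_glue_reset_tx_chn(tx_chn, priv, tx_cleanup);
    k3_udma_glue_disable_tx_chn(tx_chn);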
576 void k3_udma_glue_reset_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_reset_tx_chn() argument
580 struct device *dev = tx_chn->common.dev; in k3_udma_glue_reset_tx_chn()
591 occ_tx = k3_ringacc_ring_get_occ(tx_chn->ringtx); in k3_udma_glue_reset_tx_chn()
595 ret = k3_ringacc_ring_pop(tx_chn->ringtx, &desc_dma); in k3_udma_glue_reset_tx_chn()
605 k3_ringacc_ring_reset(tx_chn->ringtxcq); in k3_udma_glue_reset_tx_chn()
606 k3_ringacc_ring_reset_dma(tx_chn->ringtx, occ_tx); in k3_udma_glue_reset_tx_chn()
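Reset pops whatever still occupies the submit ring (occ_tx entries) and hands each stranded descriptor back through the caller's cleanup callback before resetting both rings. A hedged sketch of the callback shape the void (*)(void *data, dma_addr_t desc_dma) parameter expects; the body is driver policy:

    static void tx_cleanup(void *data, dma_addr_t desc_dma)
    {
            struct my_priv *priv = data;        /* hypothetical driver state */

            /* look up the CPU descriptor for desc_dma, unmap the attached
             * buffer, free the skb/page, return the descriptor to its pool
             */
    }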
610 u32 k3_udma_glue_tx_get_hdesc_size(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_hdesc_size() argument
612 return tx_chn->common.hdesc_size; in k3_udma_glue_tx_get_hdesc_size()
616 u32 k3_udma_glue_tx_get_txcq_id(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_txcq_id() argument
618 return k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_tx_get_txcq_id()
622 int k3_udma_glue_tx_get_irq(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_irq() argument
624 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_tx_get_irq()
625 tx_chn->virq = xudma_pktdma_tflow_get_irq(tx_chn->common.udmax, in k3_udma_glue_tx_get_irq()
626 tx_chn->udma_tflow_id); in k3_udma_glue_tx_get_irq()
628 tx_chn->virq = k3_ringacc_get_ring_irq_num(tx_chn->ringtxcq); in k3_udma_glue_tx_get_irq()
631 if (!tx_chn->virq) in k3_udma_glue_tx_get_irq()
634 return tx_chn->virq; in k3_udma_glue_tx_get_irq()
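On PKTDMA the TX interrupt comes from the mapped tflow; on UDMA-P it is the txcq ring's event. Either way the consumer just requests whatever this getter returns. A hedged sketch; the handler name, trigger flags, and irq name string are assumptions:

    irq = k3_udma_glue_tx_get_irq(tx_chn);
    if (irq < 0)
            return irq;

    ret = devm_request_irq(dev, irq, my_tx_irq_handler, IRQF_TRIGGER_HIGH,
                           "tx-chn", priv);
    if (ret)
            return ret;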
639 k3_udma_glue_tx_get_dma_device(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_dma_device() argument
641 if (xudma_is_pktdma(tx_chn->common.udmax) && in k3_udma_glue_tx_get_dma_device()
642 (tx_chn->common.atype_asel == 14 || tx_chn->common.atype_asel == 15)) in k3_udma_glue_tx_get_dma_device()
643 return &tx_chn->common.chan_dev; in k3_udma_glue_tx_get_dma_device()
645 return xudma_get_device(tx_chn->common.udmax); in k3_udma_glue_tx_get_dma_device()
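The returned device is what descriptor memory must be mapped against: for PKTDMA channels using ASEL 14 or 15 the glue layer substitutes its own coherent chan_dev (see the dma_coherent hits at source lines 314-315); otherwise it is the underlying UDMA device. A hedged pool-allocation sketch; the pool name and 16-byte alignment are assumptions, and in-tree TI drivers use their own descriptor-pool helpers rather than plain dma_pool:

    struct device *dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn);

    desc_pool = dma_pool_create("tx_hdesc", dma_dev,
                                k3_udma_glue_tx_get_hdesc_size(tx_chn),
                                16, 0);
    if (!desc_pool)
            return -ENOMEM;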
649 void k3_udma_glue_tx_dma_to_cppi5_addr(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tx_dma_to_cppi5_addr() argument
652 if (!xudma_is_pktdma(tx_chn->common.udmax) || in k3_udma_glue_tx_dma_to_cppi5_addr()
653 !tx_chn->common.atype_asel) in k3_udma_glue_tx_dma_to_cppi5_addr()
656 *addr |= (u64)tx_chn->common.atype_asel << K3_ADDRESS_ASEL_SHIFT; in k3_udma_glue_tx_dma_to_cppi5_addr()
660 void k3_udma_glue_tx_cppi5_to_dma_addr(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tx_cppi5_to_dma_addr() argument
663 if (!xudma_is_pktdma(tx_chn->common.udmax) || in k3_udma_glue_tx_cppi5_to_dma_addr()
664 !tx_chn->common.atype_asel) in k3_udma_glue_tx_cppi5_to_dma_addr()
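These two helpers translate between DMA-API addresses and the ASEL-tagged addresses CPPI5 descriptors carry on PKTDMA: the asel value is shifted into the bits above the physical address (K3_ADDRESS_ASEL_SHIFT is 48 in this driver family) on the way in, and masked off on the way out. A hedged sketch of both directions, active only when xudma_is_pktdma() and asel is nonzero:

    /* dma -> cppi5: tag the address with the ASEL route (the hit at
     * source line 656)
     */
    *addr |= (u64)tx_chn->common.atype_asel << K3_ADDRESS_ASEL_SHIFT;

    /* cppi5 -> dma: strip the ASEL bits before handing the address
     * back to the DMA API
     */
    *addr &= GENMASK_ULL(K3_ADDRESS_ASEL_SHIFT - 1, 0);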