Lines Matching +full:ahb +full:burst +full:config

1 // SPDX-License-Identifier: GPL-2.0-only
11 #include <linux/dma-mapping.h>
24 #include "../virt-dma.h"
56 /* MISR DMA non-secure/secure masked interrupt status register */
140 CTR1_PAM_0S_LT, /* if DDW > SDW, padded with 0s else left-truncated */
141 CTR1_PAM_SE_RT, /* if DDW > SDW, sign extended else right-truncated */
163 /* CxLLR DMA channel x linked-list address register */
192 AXI64, /* 1x AXI: 64-bit port 0 */
193 AHB32, /* 1x AHB: 32-bit port 0 */
194 AHB32_AHB32, /* 2x AHB: 32-bit port 0 and 32-bit port 1 */
195 AXI64_AHB32, /* 1x AXI 64-bit port 0 and 1x AHB 32-bit port 1 */
196 AXI64_AXI64, /* 2x AXI: 64-bit port 0 and 64-bit port 1 */
197 AXI128_AHB32, /* 1x AXI 128-bit port 0 and 1x AHB 32-bit port 1 */
201 DW_32, /* 32-bit, for AHB */
202 DW_64, /* 64-bit, for AXI */
203 DW_128, /* 128-bit, for AXI */
243 /* Static linked-list data structure (depends on update bits UT1/UT2/UB1/USA/UDA/ULL) */
255 * by the pointer to the next linked-list data structure. The __aligned forces the 32-byte alignment.
262 * Linked-list items
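From the two comments above, and consistent with the hwdesc accesses further down, each linked-list item mirrors the six channel transfer registers in one 32-byte aligned block. A minimal sketch of the implied layout (the exact definition is assumed):

struct stm32_dma3_hwdesc {
	u32 ctr1;	/* transfer register 1: data widths, ports, bursts */
	u32 ctr2;	/* transfer register 2: request line, direction */
	u32 cbr1;	/* block register 1: byte count (BNDT) */
	u32 csar;	/* source address */
	u32 cdar;	/* destination address */
	u32 cllr;	/* link to next item + UT1/UT2/UB1/USA/UDA/ULL update bits */
} __aligned(32);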
317 return container_of(chan->vchan.chan.device, struct stm32_dma3_ddata, dma_dev); in to_stm32_dma3_ddata()
332 return &chan->vchan.chan.dev->device; in chan2dev()
339 u32 id = chan->id, offset; in stm32_dma3_chan_dump_reg()
342 dev_dbg(dev, "SECCFGR(0x%03x): %08x\n", offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
344 dev_dbg(dev, "PRIVCFGR(0x%03x): %08x\n", offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
346 dev_dbg(dev, "C%dCIDCFGR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
348 dev_dbg(dev, "C%dSEMCR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
350 dev_dbg(dev, "C%dSR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
352 dev_dbg(dev, "C%dCR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
354 dev_dbg(dev, "C%dTR1(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
356 dev_dbg(dev, "C%dTR2(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
358 dev_dbg(dev, "C%dBR1(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
360 dev_dbg(dev, "C%dSAR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
362 dev_dbg(dev, "C%dDAR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
364 dev_dbg(dev, "C%dLLR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
366 dev_dbg(dev, "C%dLBAR(0x%03x): %08x\n", id, offset, readl_relaxed(ddata->base + offset)); in stm32_dma3_chan_dump_reg()
375 for (i = 0; i < swdesc->lli_size; i++) { in stm32_dma3_chan_dump_hwdesc()
376 hwdesc = swdesc->lli[i].hwdesc; in stm32_dma3_chan_dump_hwdesc()
379 dev_dbg(chan2dev(chan), "[%d]@%pad\n", i, &swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_dump_hwdesc()
380 dev_dbg(chan2dev(chan), "| C%dTR1: %08x\n", chan->id, hwdesc->ctr1); in stm32_dma3_chan_dump_hwdesc()
381 dev_dbg(chan2dev(chan), "| C%dTR2: %08x\n", chan->id, hwdesc->ctr2); in stm32_dma3_chan_dump_hwdesc()
382 dev_dbg(chan2dev(chan), "| C%dBR1: %08x\n", chan->id, hwdesc->cbr1); in stm32_dma3_chan_dump_hwdesc()
383 dev_dbg(chan2dev(chan), "| C%dSAR: %08x\n", chan->id, hwdesc->csar); in stm32_dma3_chan_dump_hwdesc()
384 dev_dbg(chan2dev(chan), "| C%dDAR: %08x\n", chan->id, hwdesc->cdar); in stm32_dma3_chan_dump_hwdesc()
385 dev_dbg(chan2dev(chan), "| C%dLLR: %08x\n", chan->id, hwdesc->cllr); in stm32_dma3_chan_dump_hwdesc()
388 if (swdesc->cyclic) { in stm32_dma3_chan_dump_hwdesc()
390 dev_dbg(chan2dev(chan), "-->[0]@%pad\n", &swdesc->lli[0].hwdesc_addr); in stm32_dma3_chan_dump_hwdesc()
403 * If the memory to be allocated for the number of hwdesc (6 u32 members but 32-bytes aligned) exceeds the maximum address of CLLR_LA, the last items cannot be addressed, so abort the allocation. in stm32_dma3_chan_desc_alloc()
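For illustration, the bound this comment describes could be enforced as below; a sketch under the assumption that CLLR_LA is the link-address field mask, not necessarily the driver's exact check:

	/* 6 x u32 = 24 bytes per item, padded to 32 bytes by __aligned(32) */
	if (count * sizeof(struct stm32_dma3_hwdesc) > CLLR_LA)
		return NULL;	/* items beyond CLLR_LA could not be linked */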
415 swdesc->lli_size = count; in stm32_dma3_chan_desc_alloc()
418 swdesc->lli[i].hwdesc = dma_pool_zalloc(chan->lli_pool, GFP_NOWAIT, in stm32_dma3_chan_desc_alloc()
419 &swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_alloc()
420 if (!swdesc->lli[i].hwdesc) in stm32_dma3_chan_desc_alloc()
423 swdesc->ccr = 0; in stm32_dma3_chan_desc_alloc()
426 writel_relaxed(swdesc->lli[0].hwdesc_addr & CLBAR_LBA, in stm32_dma3_chan_desc_alloc()
427 ddata->base + STM32_DMA3_CLBAR(chan->id)); in stm32_dma3_chan_desc_alloc()
430 swdesc->ccr &= ~CCR_LAP; in stm32_dma3_chan_desc_alloc()
436 while (--i >= 0) in stm32_dma3_chan_desc_alloc()
437 dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc, swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_alloc()
448 for (i = 0; i < swdesc->lli_size; i++) in stm32_dma3_chan_desc_free()
449 dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc, swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_free()
457 struct stm32_dma3_chan *chan = to_stm32_dma3_chan(vdesc->tx.chan); in stm32_dma3_chan_vdesc_free()
466 u32 ctr1 = readl_relaxed(ddata->base + STM32_DMA3_CTR1(chan->id)); in stm32_dma3_check_user_setting()
467 u32 cbr1 = readl_relaxed(ddata->base + STM32_DMA3_CBR1(chan->id)); in stm32_dma3_check_user_setting()
468 u32 csar = readl_relaxed(ddata->base + STM32_DMA3_CSAR(chan->id)); in stm32_dma3_check_user_setting()
469 u32 cdar = readl_relaxed(ddata->base + STM32_DMA3_CDAR(chan->id)); in stm32_dma3_check_user_setting()
470 u32 cllr = readl_relaxed(ddata->base + STM32_DMA3_CLLR(chan->id)); in stm32_dma3_check_user_setting()
487 if (sdw == DMA_SLAVE_BUSWIDTH_8_BYTES && port_is_ahb(ddata->ports_max_dw[sap])) in stm32_dma3_check_user_setting()
488 dev_err(dev, "double-word source data width not supported on port %u\n", sap); in stm32_dma3_check_user_setting()
489 if (ddw == DMA_SLAVE_BUSWIDTH_8_BYTES && port_is_ahb(ddata->ports_max_dw[dap])) in stm32_dma3_check_user_setting()
490 dev_err(dev, "double-word destination data width not supported on port %u\n", dap); in stm32_dma3_check_user_setting()
502 hwdesc = swdesc->lli[curr].hwdesc; in stm32_dma3_chan_prep_hwdesc()
503 hwdesc->ctr1 = ctr1; in stm32_dma3_chan_prep_hwdesc()
504 hwdesc->ctr2 = ctr2; in stm32_dma3_chan_prep_hwdesc()
505 hwdesc->cbr1 = FIELD_PREP(CBR1_BNDT, len); in stm32_dma3_chan_prep_hwdesc()
506 hwdesc->csar = src; in stm32_dma3_chan_prep_hwdesc()
507 hwdesc->cdar = dst; in stm32_dma3_chan_prep_hwdesc()
511 next_lli = swdesc->lli[0].hwdesc_addr; in stm32_dma3_chan_prep_hwdesc()
515 next_lli = swdesc->lli[next].hwdesc_addr; in stm32_dma3_chan_prep_hwdesc()
518 hwdesc->cllr = 0; in stm32_dma3_chan_prep_hwdesc()
520 hwdesc->cllr |= CLLR_UT1 | CLLR_UT2 | CLLR_UB1; in stm32_dma3_chan_prep_hwdesc()
521 hwdesc->cllr |= CLLR_USA | CLLR_UDA | CLLR_ULL; in stm32_dma3_chan_prep_hwdesc()
522 hwdesc->cllr |= (next_lli & CLLR_LA); in stm32_dma3_chan_prep_hwdesc()
550 /* len is a multiple of dw, so if len < chan_max_burst, shorten the burst */ in stm32_dma3_get_max_burst()
555 * HW doesn't modify the burst if burst size <= half of the fifo size. in stm32_dma3_get_max_burst()
556 * If len is not a multiple of burst size, last burst is shortened by HW. in stm32_dma3_get_max_burst()
557 * Take care of maximum burst supported on interconnect bus. in stm32_dma3_get_max_burst()
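Taken together, the three comments above suggest a computation along these lines (a hedged sketch with assumed parameter names, returning the burst length in beats):

	u32 max_burst = chan_max_burst;

	/* len is a multiple of dw: shorten the burst rather than overrun */
	if (len < chan_max_burst)
		max_burst = len;

	/* convert bytes to beats of width dw, then honour the bus limit */
	return min(max_burst / dw, bus_max_burst);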
567 struct dma_device dma_device = ddata->dma_dev; in stm32_dma3_chan_prep_hw()
571 u32 ch_conf = chan->dt_config.ch_conf; in stm32_dma3_chan_prep_hw()
572 u32 tr_conf = chan->dt_config.tr_conf; in stm32_dma3_chan_prep_hw()
579 sdw = chan->dma_config.src_addr_width ? : get_chan_max_dw(sap, chan->max_burst); in stm32_dma3_chan_prep_hw()
580 ddw = chan->dma_config.dst_addr_width ? : get_chan_max_dw(dap, chan->max_burst); in stm32_dma3_chan_prep_hw()
581 sbl_max = chan->dma_config.src_maxburst ? : 1; in stm32_dma3_chan_prep_hw()
582 dbl_max = chan->dma_config.dst_maxburst ? : 1; in stm32_dma3_chan_prep_hw()
587 return -EINVAL; in stm32_dma3_chan_prep_hw()
590 if (ddata->ports_max_dw[1] == DW_INVALID && (sap || dap)) { in stm32_dma3_chan_prep_hw()
592 return -EINVAL; in stm32_dma3_chan_prep_hw()
595 sap_max_dw = ddata->ports_max_dw[sap]; in stm32_dma3_chan_prep_hw()
596 dap_max_dw = ddata->ports_max_dw[dap]; in stm32_dma3_chan_prep_hw()
602 return -EINVAL; in stm32_dma3_chan_prep_hw()
609 if (port_is_axi(sap_max_dw)) /* AXI - apply axi maximum burst limitation */ in stm32_dma3_chan_prep_hw()
610 src_max_burst = ddata->axi_max_burst_len; in stm32_dma3_chan_prep_hw()
615 if (port_is_axi(dap_max_dw)) /* AXI - apply axi maximum burst limitation */ in stm32_dma3_chan_prep_hw()
616 dst_max_burst = ddata->axi_max_burst_len; in stm32_dma3_chan_prep_hw()
618 _ctr2 |= FIELD_PREP(CTR2_REQSEL, chan->dt_config.req_line) & ~CTR2_SWREQ; in stm32_dma3_chan_prep_hw()
627 chan->tcem = tcem; in stm32_dma3_chan_prep_hw()
629 chan->dma_config.direction = dir; in stm32_dma3_chan_prep_hw()
633 /* Set destination (device) data width and burst */ in stm32_dma3_chan_prep_hw()
634 ddw = min_t(u32, ddw, stm32_dma3_get_max_dw(chan->max_burst, dap_max_dw, in stm32_dma3_chan_prep_hw()
636 dbl_max = min_t(u32, dbl_max, stm32_dma3_get_max_burst(len, ddw, chan->max_burst, in stm32_dma3_chan_prep_hw()
639 /* Set source (memory) data width and burst */ in stm32_dma3_chan_prep_hw()
640 sdw = stm32_dma3_get_max_dw(chan->max_burst, sap_max_dw, len, src_addr); in stm32_dma3_chan_prep_hw()
641 sbl_max = stm32_dma3_get_max_burst(len, sdw, chan->max_burst, src_max_burst); in stm32_dma3_chan_prep_hw()
648 _ctr1 |= FIELD_PREP(CTR1_SBL_1, sbl_max - 1); in stm32_dma3_chan_prep_hw()
650 _ctr1 |= FIELD_PREP(CTR1_DBL_1, dbl_max - 1); in stm32_dma3_chan_prep_hw()
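Note the _1 suffix: CTR1 stores burst lengths as beats minus one, and data widths as log2 values (CTR1_DDW_LOG2 shows up in the residue code below; CTR1_SDW_LOG2 is assumed by symmetry). With illustrative values:

	/* 4-byte widths (ilog2(4) = 2) and 8-beat bursts (programmed as 7) */
	_ctr1 |= FIELD_PREP(CTR1_SDW_LOG2, ilog2(4));	/* assumed field name */
	_ctr1 |= FIELD_PREP(CTR1_DDW_LOG2, ilog2(4));
	_ctr1 |= FIELD_PREP(CTR1_SBL_1, 8 - 1) | FIELD_PREP(CTR1_DBL_1, 8 - 1);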
655 if (len & (ddw - 1)) { in stm32_dma3_chan_prep_hw()
658 return -EINVAL; in stm32_dma3_chan_prep_hw()
668 /* Set source (device) data width and burst */ in stm32_dma3_chan_prep_hw()
669 sdw = min_t(u32, sdw, stm32_dma3_get_max_dw(chan->max_burst, sap_max_dw, in stm32_dma3_chan_prep_hw()
671 sbl_max = min_t(u32, sbl_max, stm32_dma3_get_max_burst(len, sdw, chan->max_burst, in stm32_dma3_chan_prep_hw()
674 /* Set destination (memory) data width and burst */ in stm32_dma3_chan_prep_hw()
675 ddw = stm32_dma3_get_max_dw(chan->max_burst, dap_max_dw, len, dst_addr); in stm32_dma3_chan_prep_hw()
676 dbl_max = stm32_dma3_get_max_burst(len, ddw, chan->max_burst, dst_max_burst); in stm32_dma3_chan_prep_hw()
684 _ctr1 |= FIELD_PREP(CTR1_SBL_1, sbl_max - 1); in stm32_dma3_chan_prep_hw()
686 _ctr1 |= FIELD_PREP(CTR1_DBL_1, dbl_max - 1); in stm32_dma3_chan_prep_hw()
691 if (len & (ddw - 1)) { in stm32_dma3_chan_prep_hw()
694 return -EINVAL; in stm32_dma3_chan_prep_hw()
704 /* Set source (memory) data width and burst */ in stm32_dma3_chan_prep_hw()
707 sdw = stm32_dma3_get_max_dw(chan->max_burst, sap_max_dw, len, src_addr); in stm32_dma3_chan_prep_hw()
708 sbl_max = stm32_dma3_get_max_burst(len, sdw, chan->max_burst, src_max_burst); in stm32_dma3_chan_prep_hw()
709 if (chan->config_set & STM32_DMA3_CFG_SET_DMA) { in stm32_dma3_chan_prep_hw()
712 chan->max_burst, in stm32_dma3_chan_prep_hw()
716 /* Set destination (memory) data width and burst */ in stm32_dma3_chan_prep_hw()
719 ddw = stm32_dma3_get_max_dw(chan->max_burst, dap_max_dw, len, dst_addr); in stm32_dma3_chan_prep_hw()
720 dbl_max = stm32_dma3_get_max_burst(len, ddw, chan->max_burst, dst_max_burst); in stm32_dma3_chan_prep_hw()
721 if (chan->config_set & STM32_DMA3_CFG_SET_DMA) { in stm32_dma3_chan_prep_hw()
724 chan->max_burst, in stm32_dma3_chan_prep_hw()
729 _ctr1 |= FIELD_PREP(CTR1_SBL_1, sbl_max - 1); in stm32_dma3_chan_prep_hw()
731 _ctr1 |= FIELD_PREP(CTR1_DBL_1, dbl_max - 1); in stm32_dma3_chan_prep_hw()
736 if (len & (ddw - 1)) { in stm32_dma3_chan_prep_hw()
739 return -EINVAL; in stm32_dma3_chan_prep_hw()
751 return -EINVAL; in stm32_dma3_chan_prep_hw()
769 u32 id = chan->id; in stm32_dma3_chan_start()
772 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma3_chan_start()
774 chan->swdesc = NULL; in stm32_dma3_chan_start()
777 list_del(&vdesc->node); in stm32_dma3_chan_start()
779 chan->swdesc = to_stm32_dma3_swdesc(vdesc); in stm32_dma3_chan_start()
780 hwdesc = chan->swdesc->lli[0].hwdesc; in stm32_dma3_chan_start()
782 stm32_dma3_chan_dump_hwdesc(chan, chan->swdesc); in stm32_dma3_chan_start()
784 writel_relaxed(chan->swdesc->ccr, ddata->base + STM32_DMA3_CCR(id)); in stm32_dma3_chan_start()
785 writel_relaxed(hwdesc->ctr1, ddata->base + STM32_DMA3_CTR1(id)); in stm32_dma3_chan_start()
786 writel_relaxed(hwdesc->ctr2, ddata->base + STM32_DMA3_CTR2(id)); in stm32_dma3_chan_start()
787 writel_relaxed(hwdesc->cbr1, ddata->base + STM32_DMA3_CBR1(id)); in stm32_dma3_chan_start()
788 writel_relaxed(hwdesc->csar, ddata->base + STM32_DMA3_CSAR(id)); in stm32_dma3_chan_start()
789 writel_relaxed(hwdesc->cdar, ddata->base + STM32_DMA3_CDAR(id)); in stm32_dma3_chan_start()
790 writel_relaxed(hwdesc->cllr, ddata->base + STM32_DMA3_CLLR(id)); in stm32_dma3_chan_start()
793 csr = readl_relaxed(ddata->base + STM32_DMA3_CSR(id)); in stm32_dma3_chan_start()
795 writel_relaxed(csr, ddata->base + STM32_DMA3_CFCR(id)); in stm32_dma3_chan_start()
799 ccr = readl_relaxed(ddata->base + STM32_DMA3_CCR(id)); in stm32_dma3_chan_start()
800 writel_relaxed(ccr | CCR_EN, ddata->base + STM32_DMA3_CCR(id)); in stm32_dma3_chan_start()
802 chan->dma_status = DMA_IN_PROGRESS; in stm32_dma3_chan_start()
804 dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan); in stm32_dma3_chan_start()
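The start sequence above copies the first hwdesc into the channel registers, clears any stale flags through CFCR, and only then sets CCR_EN, so the channel is enabled after its registers are fully consistent.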
810 u32 csr, ccr = readl_relaxed(ddata->base + STM32_DMA3_CCR(chan->id)) & ~CCR_EN; in stm32_dma3_chan_suspend()
818 writel_relaxed(ccr, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_suspend()
821 ret = readl_relaxed_poll_timeout_atomic(ddata->base + STM32_DMA3_CSR(chan->id), csr, in stm32_dma3_chan_suspend()
824 writel_relaxed(CFCR_SUSPF, ddata->base + STM32_DMA3_CFCR(chan->id)); in stm32_dma3_chan_suspend()
835 u32 ccr = readl_relaxed(ddata->base + STM32_DMA3_CCR(chan->id)) & ~CCR_EN; in stm32_dma3_chan_reset()
837 writel_relaxed(ccr |= CCR_RESET, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_reset()
846 return swdesc->lli_size - 1; in stm32_dma3_chan_get_curr_hwdesc()
849 if (swdesc->cyclic && next_lli_offset == (swdesc->lli[0].hwdesc_addr & CLLR_LA)) in stm32_dma3_chan_get_curr_hwdesc()
850 return swdesc->lli_size - 1; in stm32_dma3_chan_get_curr_hwdesc()
853 for (i = swdesc->lli_size - 1; i > 0; i--) { in stm32_dma3_chan_get_curr_hwdesc()
854 *residue += FIELD_GET(CBR1_BNDT, swdesc->lli[i].hwdesc->cbr1); in stm32_dma3_chan_get_curr_hwdesc()
855 lli_offset = swdesc->lli[i].hwdesc_addr & CLLR_LA; in stm32_dma3_chan_get_curr_hwdesc()
857 return i - 1; in stm32_dma3_chan_get_curr_hwdesc()
860 return -EINVAL; in stm32_dma3_chan_get_curr_hwdesc()
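The walk above relies on the hardware CLLR pointing at the *next* item to be loaded: matching it against lli[i] means item i - 1 is in flight, and the loop accumulates CBR1_BNDT of every not-yet-started item into *residue on the way; a CLLR pointing back at lli[0] on a cyclic descriptor means the last item is in flight.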
874 csr = readl_relaxed(ddata->base + STM32_DMA3_CSR(chan->id)); in stm32_dma3_chan_set_residue()
875 if (!(csr & CSR_IDLEF) && chan->dma_status != DMA_PAUSED) { in stm32_dma3_chan_set_residue()
877 writel_relaxed(swdesc->ccr | CCR_SUSP, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_set_residue()
878 ret = readl_relaxed_poll_timeout_atomic(ddata->base + STM32_DMA3_CSR(chan->id), csr, in stm32_dma3_chan_set_residue()
882 writel_relaxed(CFCR_SUSPF, ddata->base + STM32_DMA3_CFCR(chan->id)); in stm32_dma3_chan_set_residue()
883 writel_relaxed(swdesc->ccr, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_set_residue()
895 * linked-list is over, no residue in stm32_dma3_chan_set_residue()
901 cllr = readl_relaxed(ddata->base + STM32_DMA3_CLLR(chan->id)); in stm32_dma3_chan_set_residue()
902 cbr1 = readl_relaxed(ddata->base + STM32_DMA3_CBR1(chan->id)); in stm32_dma3_chan_set_residue()
903 cdar = readl_relaxed(ddata->base + STM32_DMA3_CDAR(chan->id)); in stm32_dma3_chan_set_residue()
907 writel_relaxed(CFCR_SUSPF, ddata->base + STM32_DMA3_CFCR(chan->id)); in stm32_dma3_chan_set_residue()
908 writel_relaxed(swdesc->ccr, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_set_residue()
923 /* Read current FIFO level - in units of programmed destination data width */ in stm32_dma3_chan_set_residue()
924 hwdesc = swdesc->lli[curr_lli].hwdesc; in stm32_dma3_chan_set_residue()
925 fifol = FIELD_GET(CSR_FIFOL, csr) * (1 << FIELD_GET(CTR1_DDW_LOG2, hwdesc->ctr1)); in stm32_dma3_chan_set_residue()
927 if (fifol == (1 << (chan->fifo_size + 1))) in stm32_dma3_chan_set_residue()
931 * In case of PACKING (Destination burst length > Source burst length) or UNPACKING in stm32_dma3_chan_set_residue()
932 * (Source burst length > Destination burst length), bytes could be pending in the FIFO in stm32_dma3_chan_set_residue()
933 * (to be packed up to Destination burst length or unpacked into Destination burst length in stm32_dma3_chan_set_residue()
941 * fifol_in_bytes = bytes_read - bytes_written. in stm32_dma3_chan_set_residue()
943 pack_unpack = !!(FIELD_GET(CTR1_PAM, hwdesc->ctr1) == CTR1_PAM_PACK_UNPACK); in stm32_dma3_chan_set_residue()
944 if (pack_unpack && (hwdesc->ctr1 & CTR1_DINC)) { in stm32_dma3_chan_set_residue()
945 int bytes_read = FIELD_GET(CBR1_BNDT, hwdesc->cbr1) - bndt; in stm32_dma3_chan_set_residue()
946 int bytes_written = cdar - hwdesc->cdar; in stm32_dma3_chan_set_residue()
949 fifol = bytes_read - bytes_written; in stm32_dma3_chan_set_residue()
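A worked example of that formula (illustrative numbers, not from the source): if CBR1_BNDT was programmed to 4096 and currently reads 1024, then bytes_read = 4096 - 1024 = 3072; if CDAR has advanced 3040 bytes past hwdesc->cdar, then bytes_written = 3040 and fifol = 32 bytes are still pending in the FIFO.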
962 if (chan->dma_config.direction == DMA_DEV_TO_MEM) in stm32_dma3_chan_set_residue()
974 chan->dma_status = DMA_COMPLETE; in stm32_dma3_chan_stop()
977 ccr = readl_relaxed(ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_stop()
978 writel_relaxed(ccr & ~(CCR_ALLIE | CCR_EN), ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_stop()
998 if (!chan->swdesc) in stm32_dma3_chan_complete()
1001 vchan_cookie_complete(&chan->swdesc->vdesc); in stm32_dma3_chan_complete()
1002 chan->swdesc = NULL; in stm32_dma3_chan_complete()
1012 spin_lock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1014 misr = readl_relaxed(ddata->base + STM32_DMA3_MISR); in stm32_dma3_chan_irq()
1015 if (!(misr & MISR_MIS(chan->id))) { in stm32_dma3_chan_irq()
1016 spin_unlock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1020 csr = readl_relaxed(ddata->base + STM32_DMA3_CSR(chan->id)); in stm32_dma3_chan_irq()
1021 ccr = readl_relaxed(ddata->base + STM32_DMA3_CCR(chan->id)) & CCR_ALLIE; in stm32_dma3_chan_irq()
1024 if (chan->swdesc->cyclic) in stm32_dma3_chan_irq()
1025 vchan_cyclic_callback(&chan->swdesc->vdesc); in stm32_dma3_chan_irq()
1032 chan->dma_status = DMA_ERROR; in stm32_dma3_chan_irq()
1040 chan->dma_status = DMA_ERROR; in stm32_dma3_chan_irq()
1047 chan->dma_status = DMA_ERROR; in stm32_dma3_chan_irq()
1059 writel_relaxed(csr, ddata->base + STM32_DMA3_CFCR(chan->id)); in stm32_dma3_chan_irq()
1061 spin_unlock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1070 u32 id = chan->id, csemcr, ccid; in stm32_dma3_alloc_chan_resources()
1073 ret = pm_runtime_resume_and_get(ddata->dma_dev.dev); in stm32_dma3_alloc_chan_resources()
1078 if (chan->semaphore_mode && in stm32_dma3_alloc_chan_resources()
1079 readl_relaxed(ddata->base + STM32_DMA3_CSEMCR(chan->id)) & CSEMCR_SEM_MUTEX) { in stm32_dma3_alloc_chan_resources()
1080 ret = -EBUSY; in stm32_dma3_alloc_chan_resources()
1084 chan->lli_pool = dmam_pool_create(dev_name(&c->dev->device), c->device->dev, in stm32_dma3_alloc_chan_resources()
1087 if (!chan->lli_pool) { in stm32_dma3_alloc_chan_resources()
1089 ret = -ENOMEM; in stm32_dma3_alloc_chan_resources()
1094 if (chan->semaphore_mode) { in stm32_dma3_alloc_chan_resources()
1095 writel_relaxed(CSEMCR_SEM_MUTEX, ddata->base + STM32_DMA3_CSEMCR(id)); in stm32_dma3_alloc_chan_resources()
1096 csemcr = readl_relaxed(ddata->base + STM32_DMA3_CSEMCR(id)); in stm32_dma3_alloc_chan_resources()
1100 dev_err(chan2dev(chan), "Not under CID1 control (in-use by CID%d)\n", ccid); in stm32_dma3_alloc_chan_resources()
1101 ret = -EPERM; in stm32_dma3_alloc_chan_resources()
1110 dmam_pool_destroy(chan->lli_pool); in stm32_dma3_alloc_chan_resources()
1111 chan->lli_pool = NULL; in stm32_dma3_alloc_chan_resources()
1114 pm_runtime_put_sync(ddata->dma_dev.dev); in stm32_dma3_alloc_chan_resources()
1126 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma3_free_chan_resources()
1128 chan->swdesc = NULL; in stm32_dma3_free_chan_resources()
1129 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma3_free_chan_resources()
1133 dmam_pool_destroy(chan->lli_pool); in stm32_dma3_free_chan_resources()
1134 chan->lli_pool = NULL; in stm32_dma3_free_chan_resources()
1137 if (chan->semaphore_mode) in stm32_dma3_free_chan_resources()
1138 writel_relaxed(0, ddata->base + STM32_DMA3_CSEMCR(chan->id)); in stm32_dma3_free_chan_resources()
1140 pm_runtime_put_sync(ddata->dma_dev.dev); in stm32_dma3_free_chan_resources()
1143 memset(&chan->dt_config, 0, sizeof(chan->dt_config)); in stm32_dma3_free_chan_resources()
1144 memset(&chan->dma_config, 0, sizeof(chan->dma_config)); in stm32_dma3_free_chan_resources()
1145 chan->config_set = 0; in stm32_dma3_free_chan_resources()
1156 len -= (len / STM32_DMA3_MAX_BLOCK_SIZE) * STM32_DMA3_MAX_BLOCK_SIZE; in stm32_dma3_get_ll_count()
1158 if (len >= chan->max_burst) { in stm32_dma3_get_ll_count()
1160 len -= (len / chan->max_burst) * chan->max_burst; in stm32_dma3_get_ll_count()
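The subtract-a-multiple pattern above is simply a modulo reduction; an equivalent sketch:

	len %= STM32_DMA3_MAX_BLOCK_SIZE;	/* remainder after max-size blocks */
	if (len >= chan->max_burst)
		len %= chan->max_burst;		/* remainder after max-burst blocks */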
1174 u32 dw = get_chan_max_dw(ddata->ports_max_dw[0], chan->max_burst); /* port 0 by default */ in stm32_dma3_init_chan_config_for_memcpy()
1175 u32 burst = chan->max_burst / dw; in stm32_dma3_init_chan_config_for_memcpy()
1177 /* Initialize dt_config if channel not pre-configured through DT */ in stm32_dma3_init_chan_config_for_memcpy()
1178 if (!(chan->config_set & STM32_DMA3_CFG_SET_DT)) { in stm32_dma3_init_chan_config_for_memcpy()
1179 chan->dt_config.ch_conf = FIELD_PREP(STM32_DMA3_DT_PRIO, CCR_PRIO_VERY_HIGH); in stm32_dma3_init_chan_config_for_memcpy()
1180 chan->dt_config.ch_conf |= FIELD_PREP(STM32_DMA3_DT_FIFO, chan->fifo_size); in stm32_dma3_init_chan_config_for_memcpy()
1181 chan->dt_config.tr_conf = STM32_DMA3_DT_SINC | STM32_DMA3_DT_DINC; in stm32_dma3_init_chan_config_for_memcpy()
1182 chan->dt_config.tr_conf |= FIELD_PREP(STM32_DMA3_DT_TCEM, CTR2_TCEM_CHANNEL); in stm32_dma3_init_chan_config_for_memcpy()
1186 if (!(chan->config_set & STM32_DMA3_CFG_SET_DMA)) { in stm32_dma3_init_chan_config_for_memcpy()
1187 chan->dma_config.src_addr_width = dw; in stm32_dma3_init_chan_config_for_memcpy()
1188 chan->dma_config.dst_addr_width = dw; in stm32_dma3_init_chan_config_for_memcpy()
1189 chan->dma_config.src_maxburst = burst; in stm32_dma3_init_chan_config_for_memcpy()
1190 chan->dma_config.dst_maxburst = burst; in stm32_dma3_init_chan_config_for_memcpy()
1191 chan->dma_config.src_addr = src; in stm32_dma3_init_chan_config_for_memcpy()
1192 chan->dma_config.dst_addr = dst; in stm32_dma3_init_chan_config_for_memcpy()
1204 bool prevent_refactor = !!FIELD_GET(STM32_DMA3_DT_NOPACK, chan->dt_config.tr_conf) || in stm32_dma3_prep_dma_memcpy()
1205 !!FIELD_GET(STM32_DMA3_DT_NOREFACT, chan->dt_config.tr_conf); in stm32_dma3_prep_dma_memcpy()
1213 if (chan->config_set != STM32_DMA3_CFG_SET_BOTH) in stm32_dma3_prep_dma_memcpy()
1220 remaining = len - offset; in stm32_dma3_prep_dma_memcpy()
1224 (next_size < STM32_DMA3_MAX_BLOCK_SIZE && next_size >= chan->max_burst)) in stm32_dma3_prep_dma_memcpy()
1225 next_size = chan->max_burst * (remaining / chan->max_burst); in stm32_dma3_prep_dma_memcpy()
1227 ret = stm32_dma3_chan_prep_hw(chan, DMA_MEM_TO_MEM, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_dma_memcpy()
1237 swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE; in stm32_dma3_prep_dma_memcpy()
1239 swdesc->ccr |= CCR_TCIE; in stm32_dma3_prep_dma_memcpy()
1241 swdesc->cyclic = false; in stm32_dma3_prep_dma_memcpy()
1243 return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags); in stm32_dma3_prep_dma_memcpy()
1263 bool prevent_refactor = !!FIELD_GET(STM32_DMA3_DT_NOPACK, chan->dt_config.tr_conf) || in stm32_dma3_prep_slave_sg()
1264 !!FIELD_GET(STM32_DMA3_DT_NOREFACT, chan->dt_config.tr_conf); in stm32_dma3_prep_slave_sg()
1279 dev_addr = (dir == DMA_MEM_TO_DEV) ? chan->dma_config.dst_addr : in stm32_dma3_prep_slave_sg()
1280 chan->dma_config.src_addr; in stm32_dma3_prep_slave_sg()
1287 (chunk < STM32_DMA3_MAX_BLOCK_SIZE && chunk >= chan->max_burst)) in stm32_dma3_prep_slave_sg()
1288 chunk = chan->max_burst * (len / chan->max_burst); in stm32_dma3_prep_slave_sg()
1294 ret = stm32_dma3_chan_prep_hw(chan, dir, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_slave_sg()
1303 ret = stm32_dma3_chan_prep_hw(chan, dir, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_slave_sg()
1314 ctr1, ctr2, j == (count - 1), false); in stm32_dma3_prep_slave_sg()
1317 len -= chunk; in stm32_dma3_prep_slave_sg()
1322 if (count != sg_len && chan->tcem != CTR2_TCEM_CHANNEL) in stm32_dma3_prep_slave_sg()
1323 dev_warn(chan2dev(chan), "Linked-list refactored, %d items instead of %d\n", in stm32_dma3_prep_slave_sg()
1327 swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE; in stm32_dma3_prep_slave_sg()
1329 swdesc->ccr |= CCR_TCIE; in stm32_dma3_prep_slave_sg()
1331 swdesc->cyclic = false; in stm32_dma3_prep_slave_sg()
1333 return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags); in stm32_dma3_prep_slave_sg()
1370 dst = chan->dma_config.dst_addr; in stm32_dma3_prep_dma_cyclic()
1372 ret = stm32_dma3_chan_prep_hw(chan, DMA_MEM_TO_DEV, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_dma_cyclic()
1375 src = chan->dma_config.src_addr; in stm32_dma3_prep_dma_cyclic()
1378 ret = stm32_dma3_chan_prep_hw(chan, DMA_DEV_TO_MEM, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_dma_cyclic()
1382 ret = -EINVAL; in stm32_dma3_prep_dma_cyclic()
1391 dst = chan->dma_config.dst_addr; in stm32_dma3_prep_dma_cyclic()
1393 src = chan->dma_config.src_addr; in stm32_dma3_prep_dma_cyclic()
1398 ctr1, ctr2, i == (count - 1), true); in stm32_dma3_prep_dma_cyclic()
1402 swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE; in stm32_dma3_prep_dma_cyclic()
1404 swdesc->ccr |= CCR_TCIE; in stm32_dma3_prep_dma_cyclic()
1406 swdesc->cyclic = true; in stm32_dma3_prep_dma_cyclic()
1408 return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags); in stm32_dma3_prep_dma_cyclic()
1420 if (!chan->fifo_size) { in stm32_dma3_caps()
1421 caps->max_burst = 0; in stm32_dma3_caps()
1422 caps->src_addr_widths &= ~BIT(DMA_SLAVE_BUSWIDTH_8_BYTES); in stm32_dma3_caps()
1423 caps->dst_addr_widths &= ~BIT(DMA_SLAVE_BUSWIDTH_8_BYTES); in stm32_dma3_caps()
1425 /* Burst transfer should not exceed half of the fifo size */ in stm32_dma3_caps()
1426 caps->max_burst = chan->max_burst; in stm32_dma3_caps()
1427 if (caps->max_burst < DMA_SLAVE_BUSWIDTH_8_BYTES) { in stm32_dma3_caps()
1428 caps->src_addr_widths &= ~BIT(DMA_SLAVE_BUSWIDTH_8_BYTES); in stm32_dma3_caps()
1429 caps->dst_addr_widths &= ~BIT(DMA_SLAVE_BUSWIDTH_8_BYTES); in stm32_dma3_caps()
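For example, a channel with a 16-byte FIFO has max_burst = 8 bytes, so the 8-byte buswidths stay advertised; with an 8-byte FIFO, max_burst = 4 and DMA_SLAVE_BUSWIDTH_8_BYTES is cleared from both width masks.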
1434 static int stm32_dma3_config(struct dma_chan *c, struct dma_slave_config *config) in stm32_dma3_config()
1438 memcpy(&chan->dma_config, config, sizeof(*config)); in stm32_dma3_config()
1439 chan->config_set |= STM32_DMA3_CFG_SET_DMA; in stm32_dma3_config()
1453 chan->dma_status = DMA_PAUSED; in stm32_dma3_pause()
1455 dev_dbg(chan2dev(chan), "vchan %pK: paused\n", &chan->vchan); in stm32_dma3_pause()
1466 chan->dma_status = DMA_IN_PROGRESS; in stm32_dma3_resume()
1468 dev_dbg(chan2dev(chan), "vchan %pK: resumed\n", &chan->vchan); in stm32_dma3_resume()
1479 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma3_terminate_all()
1481 if (chan->swdesc) { in stm32_dma3_terminate_all()
1482 vchan_terminate_vdesc(&chan->swdesc->vdesc); in stm32_dma3_terminate_all()
1483 chan->swdesc = NULL; in stm32_dma3_terminate_all()
1488 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_dma3_terminate_all()
1490 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma3_terminate_all()
1491 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma3_terminate_all()
1493 dev_dbg(chan2dev(chan), "vchan %pK: terminated\n", &chan->vchan); in stm32_dma3_terminate_all()
1502 vchan_synchronize(&chan->vchan); in stm32_dma3_synchronize()
1519 return chan->dma_status; in stm32_dma3_tx_status()
1521 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma3_tx_status()
1523 vd = vchan_find_desc(&chan->vchan, cookie); in stm32_dma3_tx_status()
1526 else if (chan->swdesc && chan->swdesc->vdesc.tx.cookie == cookie) in stm32_dma3_tx_status()
1527 swdesc = chan->swdesc; in stm32_dma3_tx_status()
1533 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma3_tx_status()
1535 return chan->dma_status; in stm32_dma3_tx_status()
1543 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma3_issue_pending()
1545 if (vchan_issue_pending(&chan->vchan) && !chan->swdesc) { in stm32_dma3_issue_pending()
1546 dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan); in stm32_dma3_issue_pending()
1550 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma3_issue_pending()
1561 dev_dbg(c->device->dev, "%s(%s): req_line=%d ch_conf=%08x tr_conf=%08x\n", in stm32_dma3_filter_fn()
1562 __func__, dma_chan_name(c), conf->req_line, conf->ch_conf, conf->tr_conf); in stm32_dma3_filter_fn()
1564 if (!of_property_read_u32(c->device->dev->of_node, "dma-channel-mask", &mask)) in stm32_dma3_filter_fn()
1565 if (!(mask & BIT(chan->id))) in stm32_dma3_filter_fn()
1568 ret = pm_runtime_resume_and_get(ddata->dma_dev.dev); in stm32_dma3_filter_fn()
1571 semcr = readl_relaxed(ddata->base + STM32_DMA3_CSEMCR(chan->id)); in stm32_dma3_filter_fn()
1572 pm_runtime_put_sync(ddata->dma_dev.dev); in stm32_dma3_filter_fn()
1579 if (FIELD_GET(STM32_DMA3_DT_FIFO, conf->ch_conf) != chan->fifo_size) in stm32_dma3_filter_fn()
1587 struct stm32_dma3_ddata *ddata = ofdma->of_dma_data; in stm32_dma3_of_xlate()
1588 dma_cap_mask_t mask = ddata->dma_dev.cap_mask; in stm32_dma3_of_xlate()
1593 if (dma_spec->args_count < 3) { in stm32_dma3_of_xlate()
1594 dev_err(ddata->dma_dev.dev, "Invalid args count\n"); in stm32_dma3_of_xlate()
1598 conf.req_line = dma_spec->args[0]; in stm32_dma3_of_xlate()
1599 conf.ch_conf = dma_spec->args[1]; in stm32_dma3_of_xlate()
1600 conf.tr_conf = dma_spec->args[2]; in stm32_dma3_of_xlate()
1602 if (conf.req_line >= ddata->dma_requests) { in stm32_dma3_of_xlate()
1603 dev_err(ddata->dma_dev.dev, "Invalid request line\n"); in stm32_dma3_of_xlate()
1610 dev_err(ddata->dma_dev.dev, "No suitable channel found\n"); in stm32_dma3_of_xlate()
1615 chan->dt_config = conf; in stm32_dma3_of_xlate()
1616 chan->config_set |= STM32_DMA3_CFG_SET_DT; in stm32_dma3_of_xlate()
1626 chan_reserved = readl_relaxed(ddata->base + STM32_DMA3_SECCFGR); in stm32_dma3_check_rif()
1631 * In case CID filtering is not configured, dma-channel-mask property can be used to in stm32_dma3_check_rif()
1634 of_property_read_u32(ddata->dma_dev.dev->of_node, "dma-channel-mask", &mask); in stm32_dma3_check_rif()
1636 /* Reserve channels that are !CID-filtered and not listed in dma-channel-mask, have a static CID != CID1, or do not allow CID1 */ in stm32_dma3_check_rif()
1637 for (i = 0; i < ddata->dma_channels; i++) { in stm32_dma3_check_rif()
1638 ccidcfgr = readl_relaxed(ddata->base + STM32_DMA3_CCIDCFGR(i)); in stm32_dma3_check_rif()
1640 if (!(ccidcfgr & CCIDCFGR_CFEN)) { /* !CID-filtered */ in stm32_dma3_check_rif()
1642 if (!(mask & BIT(i))) /* Not in dma-channel-mask */ in stm32_dma3_check_rif()
1644 } else { /* CID-filtered */ in stm32_dma3_check_rif()
1651 ddata->chans[i].semaphore_mode = true; in stm32_dma3_check_rif()
1654 dev_dbg(ddata->dma_dev.dev, "chan%d: %s mode, %s\n", i, in stm32_dma3_check_rif()
1655 !(ccidcfgr & CCIDCFGR_CFEN) ? "!CID-filtered" : in stm32_dma3_check_rif()
1656 ddata->chans[i].semaphore_mode ? "Semaphore" : "Static CID", in stm32_dma3_check_rif()
1662 dev_warn(ddata->dma_dev.dev, "chan%*pbl have invalid CID configuration\n", in stm32_dma3_check_rif()
1663 ddata->dma_channels, &invalid_cid); in stm32_dma3_check_rif()
1673 { .compatible = "st,stm32mp25-dma3", .data = &stm32mp25_pdata, },
1680 struct device_node *np = pdev->dev.of_node; in stm32_dma3_probe()
1690 ddata = devm_kzalloc(&pdev->dev, sizeof(*ddata), GFP_KERNEL); in stm32_dma3_probe()
1692 return -ENOMEM; in stm32_dma3_probe()
1695 dma_dev = &ddata->dma_dev; in stm32_dma3_probe()
1697 ddata->base = devm_platform_ioremap_resource(pdev, 0); in stm32_dma3_probe()
1698 if (IS_ERR(ddata->base)) in stm32_dma3_probe()
1699 return PTR_ERR(ddata->base); in stm32_dma3_probe()
1701 ddata->clk = devm_clk_get(&pdev->dev, NULL); in stm32_dma3_probe()
1702 if (IS_ERR(ddata->clk)) in stm32_dma3_probe()
1703 return dev_err_probe(&pdev->dev, PTR_ERR(ddata->clk), "Failed to get clk\n"); in stm32_dma3_probe()
1705 reset = devm_reset_control_get_optional(&pdev->dev, NULL); in stm32_dma3_probe()
1707 return dev_err_probe(&pdev->dev, PTR_ERR(reset), "Failed to get reset\n"); in stm32_dma3_probe()
1709 ret = clk_prepare_enable(ddata->clk); in stm32_dma3_probe()
1711 return dev_err_probe(&pdev->dev, ret, "Failed to enable clk\n"); in stm32_dma3_probe()
1715 INIT_LIST_HEAD(&dma_dev->channels); in stm32_dma3_probe()
1717 dma_cap_set(DMA_SLAVE, dma_dev->cap_mask); in stm32_dma3_probe()
1718 dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask); in stm32_dma3_probe()
1719 dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask); in stm32_dma3_probe()
1720 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in stm32_dma3_probe()
1721 dma_dev->dev = &pdev->dev; in stm32_dma3_probe()
1723 * This controller supports up to 8-byte buswidth depending on the port used and the channel. in stm32_dma3_probe()
1726 dma_dev->copy_align = DMAENGINE_ALIGN_8_BYTES; in stm32_dma3_probe()
1727 dma_dev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in stm32_dma3_probe()
1731 dma_dev->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in stm32_dma3_probe()
1735 dma_dev->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) | BIT(DMA_MEM_TO_MEM); in stm32_dma3_probe()
1737 dma_dev->descriptor_reuse = true; in stm32_dma3_probe()
1738 dma_dev->max_sg_burst = STM32_DMA3_MAX_SEG_SIZE; in stm32_dma3_probe()
1739 dma_dev->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in stm32_dma3_probe()
1740 dma_dev->device_alloc_chan_resources = stm32_dma3_alloc_chan_resources; in stm32_dma3_probe()
1741 dma_dev->device_free_chan_resources = stm32_dma3_free_chan_resources; in stm32_dma3_probe()
1742 dma_dev->device_prep_dma_memcpy = stm32_dma3_prep_dma_memcpy; in stm32_dma3_probe()
1743 dma_dev->device_prep_slave_sg = stm32_dma3_prep_slave_sg; in stm32_dma3_probe()
1744 dma_dev->device_prep_dma_cyclic = stm32_dma3_prep_dma_cyclic; in stm32_dma3_probe()
1745 dma_dev->device_caps = stm32_dma3_caps; in stm32_dma3_probe()
1746 dma_dev->device_config = stm32_dma3_config; in stm32_dma3_probe()
1747 dma_dev->device_pause = stm32_dma3_pause; in stm32_dma3_probe()
1748 dma_dev->device_resume = stm32_dma3_resume; in stm32_dma3_probe()
1749 dma_dev->device_terminate_all = stm32_dma3_terminate_all; in stm32_dma3_probe()
1750 dma_dev->device_synchronize = stm32_dma3_synchronize; in stm32_dma3_probe()
1751 dma_dev->device_tx_status = stm32_dma3_tx_status; in stm32_dma3_probe()
1752 dma_dev->device_issue_pending = stm32_dma3_issue_pending; in stm32_dma3_probe()
1755 if (of_property_read_u32(np, "dma-channels", &ddata->dma_channels)) { in stm32_dma3_probe()
1756 hwcfgr = readl_relaxed(ddata->base + STM32_DMA3_HWCFGR1); in stm32_dma3_probe()
1757 ddata->dma_channels = FIELD_GET(G_NUM_CHANNELS, hwcfgr); in stm32_dma3_probe()
1761 if (of_property_read_u32(np, "dma-requests", &ddata->dma_requests)) { in stm32_dma3_probe()
1762 hwcfgr = readl_relaxed(ddata->base + STM32_DMA3_HWCFGR2); in stm32_dma3_probe()
1763 ddata->dma_requests = FIELD_GET(G_MAX_REQ_ID, hwcfgr) + 1; in stm32_dma3_probe()
1767 hwcfgr = readl_relaxed(ddata->base + STM32_DMA3_HWCFGR1); in stm32_dma3_probe()
1770 ddata->ports_max_dw[0] = FIELD_GET(G_M0_DATA_WIDTH_ENC, hwcfgr); in stm32_dma3_probe()
1772 ddata->ports_max_dw[1] = DW_INVALID; in stm32_dma3_probe()
1774 ddata->ports_max_dw[1] = FIELD_GET(G_M1_DATA_WIDTH_ENC, hwcfgr); in stm32_dma3_probe()
1777 ddata->axi_max_burst_len = STM32_DMA3_MAX_BURST_LEN; in stm32_dma3_probe()
1778 pdata = device_get_match_data(&pdev->dev); in stm32_dma3_probe()
1779 if (pdata && pdata->axi_max_burst_len) { in stm32_dma3_probe()
1780 ddata->axi_max_burst_len = min_t(u32, pdata->axi_max_burst_len, in stm32_dma3_probe()
1782 dev_dbg(&pdev->dev, "Burst is limited to %u beats through AXI port\n", in stm32_dma3_probe()
1783 ddata->axi_max_burst_len); in stm32_dma3_probe()
1786 ddata->chans = devm_kcalloc(&pdev->dev, ddata->dma_channels, sizeof(*ddata->chans), in stm32_dma3_probe()
1788 if (!ddata->chans) { in stm32_dma3_probe()
1789 ret = -ENOMEM; in stm32_dma3_probe()
1795 if (chan_reserved == GENMASK(ddata->dma_channels - 1, 0)) { in stm32_dma3_probe()
1796 ret = -ENODEV; in stm32_dma3_probe()
1797 dev_err_probe(&pdev->dev, ret, "No channel available, abort registration\n"); in stm32_dma3_probe()
1802 hwcfgr = readl_relaxed(ddata->base + STM32_DMA3_HWCFGR3); in stm32_dma3_probe()
1803 hwcfgr |= ((u64)readl_relaxed(ddata->base + STM32_DMA3_HWCFGR4)) << 32; in stm32_dma3_probe()
1805 for (i = 0; i < ddata->dma_channels; i++) { in stm32_dma3_probe()
1809 chan = &ddata->chans[i]; in stm32_dma3_probe()
1810 chan->id = i; in stm32_dma3_probe()
1811 chan->fifo_size = get_chan_hwcfg(i, G_FIFO_SIZE(i), hwcfgr); in stm32_dma3_probe()
1812 /* max_burst is half the FIFO size when chan->fifo_size > 0; no FIFO (fifo_size == 0) means no burst */ in stm32_dma3_probe()
1813 chan->max_burst = (chan->fifo_size) ? (1 << (chan->fifo_size + 1)) / 2 : 0; in stm32_dma3_probe()
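Worked example of that encoding: fifo_size = 3 means a 1 << (3 + 1) = 16-byte FIFO and thus max_burst = 8 bytes, while fifo_size = 0 means no FIFO and bursts are disabled.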
1820 for (i = 0; i < ddata->dma_channels; i++) { in stm32_dma3_probe()
1826 chan = &ddata->chans[i]; in stm32_dma3_probe()
1827 snprintf(name, sizeof(name), "dma%dchan%d", ddata->dma_dev.dev_id, chan->id); in stm32_dma3_probe()
1829 chan->vchan.desc_free = stm32_dma3_chan_vdesc_free; in stm32_dma3_probe()
1830 vchan_init(&chan->vchan, dma_dev); in stm32_dma3_probe()
1832 ret = dma_async_device_channel_register(&ddata->dma_dev, &chan->vchan.chan, name); in stm32_dma3_probe()
1834 dev_err_probe(&pdev->dev, ret, "Failed to register channel %s\n", name); in stm32_dma3_probe()
1841 chan->irq = ret; in stm32_dma3_probe()
1843 ret = devm_request_irq(&pdev->dev, chan->irq, stm32_dma3_chan_irq, 0, in stm32_dma3_probe()
1846 dev_err_probe(&pdev->dev, ret, "Failed to request channel %s IRQ\n", in stm32_dma3_probe()
1854 dev_err_probe(&pdev->dev, ret, "Failed to register controller\n"); in stm32_dma3_probe()
1858 verr = readl_relaxed(ddata->base + STM32_DMA3_VERR); in stm32_dma3_probe()
1860 pm_runtime_set_active(&pdev->dev); in stm32_dma3_probe()
1861 pm_runtime_enable(&pdev->dev); in stm32_dma3_probe()
1862 pm_runtime_get_noresume(&pdev->dev); in stm32_dma3_probe()
1863 pm_runtime_put(&pdev->dev); in stm32_dma3_probe()
1865 dev_info(&pdev->dev, "STM32 DMA3 registered rev:%lu.%lu\n", in stm32_dma3_probe()
1871 clk_disable_unprepare(ddata->clk); in stm32_dma3_probe()
1878 pm_runtime_disable(&pdev->dev); in stm32_dma3_remove()
1885 clk_disable_unprepare(ddata->clk); in stm32_dma3_runtime_suspend()
1895 ret = clk_prepare_enable(ddata->clk); in stm32_dma3_runtime_resume()
1911 .name = "stm32-dma3",