Lines Matching +full:dma +full:- +full:info

1 // SPDX-License-Identifier: GPL-2.0+
3 * BCM2835 DMA engine support
11 * BCM2708 DMA Driver
17 * MARVELL MMP Peripheral DMA Driver
21 #include <linux/dma-mapping.h>
35 #include "virt-dma.h"
41 * struct bcm2835_dmadev - BCM2835 DMA controller
42 * @ddev: DMA device
54 uint32_t info; member
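For orientation, the matched info word above is the first field of the hardware control block the engine fetches from memory. A hedged reconstruction of the full 32-byte layout, inferred from the control_block->info/src/dst/length/stride/next accesses further down this listing (the trailing padding is an assumption):

struct bcm2835_dma_cb {
        uint32_t info;          /* transfer information (TI) bits */
        uint32_t src;           /* source bus address */
        uint32_t dst;           /* destination bus address */
        uint32_t length;        /* transfer length in bytes */
        uint32_t stride;        /* 2D stride, only used with BCM2835_DMA_TDMODE */
        uint32_t next;          /* bus address of the next control block, 0 = stop */
        uint32_t pad[2];        /* reserved, assumed zero */
};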
108 /* DMA CS Control and Status bits */
109 #define BCM2835_DMA_ACTIVE BIT(0) /* activate the DMA */
116 * AXI-write to ack
127 /* Transfer information bits - also bcm2835_cb.info field */
129 #define BCM2835_DMA_TDMODE BIT(1) /* 2D-Mode */
130 #define BCM2835_DMA_WAIT_RESP BIT(3) /* wait for AXI-write to be acked */
138 #define BCM2835_DMA_S_IGNORE BIT(11) /* ignore source reads - read 0 */
141 #define BCM2835_DMA_WAIT(x) ((x & 31) << 21) /* add DMA-wait cycles */
158 /* shared registers for all dma channels */
167 /* Valid only for channels 0 - 14, 15 has its own base address */
171 /* the max dma length for different channels */
173 #define MAX_LITE_DMA_LEN (SZ_64K - 4)
178 return c->is_lite_channel ? MAX_LITE_DMA_LEN : MAX_DMA_LEN; in bcm2835_dma_max_frame_length()
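Because a lite channel caps a single control block at MAX_LITE_DMA_LEN, a transfer is split into ceil(len / max_len) frames before the chain is built. A minimal sketch of that computation (the driver's own helper for it is not among the matched lines, so its shape here is an assumption):

#include <linux/kernel.h>       /* DIV_ROUND_UP */

static size_t frames_for_length(size_t len, size_t max_len)
{
        /* one control block per max_len-sized chunk, plus a partial tail */
        return DIV_ROUND_UP(len, max_len);
}

For example, an 80 KiB memcpy on a lite channel (cap 65532 bytes) needs DIV_ROUND_UP(81920, 65532) = 2 control blocks.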
208 for (i = 0; i < desc->frames; i++) in bcm2835_dma_free_cb_chain()
209 dma_pool_free(desc->c->cb_pool, desc->cb_list[i].cb, in bcm2835_dma_free_cb_chain()
210 desc->cb_list[i].paddr); in bcm2835_dma_free_cb_chain()
231 /* set the length taking lite-channel limitations into account */ in bcm2835_dma_create_cb_set_length()
232 control_block->length = min_t(u32, len, max_len); in bcm2835_dma_create_cb_set_length()
241 * multiple of period_len - this is typically in bcm2835_dma_create_cb_set_length()
242 * used to set the interrupt flag in info in bcm2835_dma_create_cb_set_length()
247 if (*total_len + control_block->length < period_len) { in bcm2835_dma_create_cb_set_length()
249 *total_len += control_block->length; in bcm2835_dma_create_cb_set_length()
254 control_block->length = period_len - *total_len; in bcm2835_dma_create_cb_set_length()
259 /* add extrainfo bits in info */ in bcm2835_dma_create_cb_set_length()
260 control_block->info |= finalextrainfo; in bcm2835_dma_create_cb_set_length()
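To make the period handling concrete with hypothetical numbers: for a cyclic transfer with period_len = 6000 on a channel whose frame cap is 4096, the first control block keeps its full 4096 bytes (running total 4096 < 6000, no extra bits set), the next one is clipped to 6000 - 4096 = 1904 bytes and gets finalextrainfo OR'd into its info word (typically the interrupt flag, so each completed period raises an IRQ), and the running total resets for the following period.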
281 * bcm2835_dma_create_cb_chain - create a control block and fills data in
286 * @info: the default info bits to apply per controlblock
289 * in @info, then it gets incremented)
291 * in @info, then it gets incremented)
295 * this will also break some control-blocks early
302 bool cyclic, u32 info, u32 finalextrainfo, size_t frames, in bcm2835_dma_create_cb_chain() argument
321 d->c = c; in bcm2835_dma_create_cb_chain()
322 d->dir = direction; in bcm2835_dma_create_cb_chain()
323 d->cyclic = cyclic; in bcm2835_dma_create_cb_chain()
329 for (frame = 0, total_len = 0; frame < frames; d->frames++, frame++) { in bcm2835_dma_create_cb_chain()
330 cb_entry = &d->cb_list[frame]; in bcm2835_dma_create_cb_chain()
331 cb_entry->cb = dma_pool_alloc(c->cb_pool, gfp, in bcm2835_dma_create_cb_chain()
332 &cb_entry->paddr); in bcm2835_dma_create_cb_chain()
333 if (!cb_entry->cb) in bcm2835_dma_create_cb_chain()
337 control_block = cb_entry->cb; in bcm2835_dma_create_cb_chain()
338 control_block->info = info; in bcm2835_dma_create_cb_chain()
339 control_block->src = src; in bcm2835_dma_create_cb_chain()
340 control_block->dst = dst; in bcm2835_dma_create_cb_chain()
341 control_block->stride = 0; in bcm2835_dma_create_cb_chain()
342 control_block->next = 0; in bcm2835_dma_create_cb_chain()
352 len -= control_block->length; in bcm2835_dma_create_cb_chain()
357 d->cb_list[frame - 1].cb->next = cb_entry->paddr; in bcm2835_dma_create_cb_chain()
360 if (src && (info & BCM2835_DMA_S_INC)) in bcm2835_dma_create_cb_chain()
361 src += control_block->length; in bcm2835_dma_create_cb_chain()
362 if (dst && (info & BCM2835_DMA_D_INC)) in bcm2835_dma_create_cb_chain()
363 dst += control_block->length; in bcm2835_dma_create_cb_chain()
366 d->size += control_block->length; in bcm2835_dma_create_cb_chain()
370 d->cb_list[d->frames - 1].cb->info |= finalextrainfo; in bcm2835_dma_create_cb_chain()
373 if (buf_len && (d->size != buf_len)) in bcm2835_dma_create_cb_chain()
400 addr += cb->cb->length, len -= cb->cb->length, cb++) { in bcm2835_dma_fill_cb_chain_with_sg()
402 cb->cb->dst = addr; in bcm2835_dma_fill_cb_chain_with_sg()
404 cb->cb->src = addr; in bcm2835_dma_fill_cb_chain_with_sg()
405 cb->cb->length = min(len, max_len); in bcm2835_dma_fill_cb_chain_with_sg()
412 void __iomem *chan_base = c->chan_base; in bcm2835_dma_abort()
422 /* Write 0 to the active bit - Pause the DMA */ in bcm2835_dma_abort()
427 BCM2835_DMA_WAITING_FOR_WRITES) && --timeout) in bcm2835_dma_abort()
432 dev_err(c->vc.chan.device->dev, in bcm2835_dma_abort()
440 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in bcm2835_dma_start_desc()
444 c->desc = NULL; in bcm2835_dma_start_desc()
448 list_del(&vd->node); in bcm2835_dma_start_desc()
450 c->desc = d = to_bcm2835_dma_desc(&vd->tx); in bcm2835_dma_start_desc()
452 writel(d->cb_list[0].paddr, c->chan_base + BCM2835_DMA_ADDR); in bcm2835_dma_start_desc()
453 writel(BCM2835_DMA_ACTIVE, c->chan_base + BCM2835_DMA_CS); in bcm2835_dma_start_desc()
463 if (c->irq_flags & IRQF_SHARED) { in bcm2835_dma_callback()
465 flags = readl(c->chan_base + BCM2835_DMA_CS); in bcm2835_dma_callback()
471 spin_lock_irqsave(&c->vc.lock, flags); in bcm2835_dma_callback()
481 c->chan_base + BCM2835_DMA_CS); in bcm2835_dma_callback()
483 d = c->desc; in bcm2835_dma_callback()
486 if (d->cyclic) { in bcm2835_dma_callback()
488 vchan_cyclic_callback(&d->vd); in bcm2835_dma_callback()
489 } else if (!readl(c->chan_base + BCM2835_DMA_ADDR)) { in bcm2835_dma_callback()
490 vchan_cookie_complete(&c->desc->vd); in bcm2835_dma_callback()
495 spin_unlock_irqrestore(&c->vc.lock, flags); in bcm2835_dma_callback()
503 struct device *dev = c->vc.chan.device->dev; in bcm2835_dma_alloc_chan_resources()
505 dev_dbg(dev, "Allocating DMA channel %d\n", c->ch); in bcm2835_dma_alloc_chan_resources()
511 c->cb_pool = dma_pool_create(dev_name(dev), dev, in bcm2835_dma_alloc_chan_resources()
513 if (!c->cb_pool) { in bcm2835_dma_alloc_chan_resources()
515 return -ENOMEM; in bcm2835_dma_alloc_chan_resources()
518 return request_irq(c->irq_number, bcm2835_dma_callback, in bcm2835_dma_alloc_chan_resources()
519 c->irq_flags, "DMA IRQ", c); in bcm2835_dma_alloc_chan_resources()
526 vchan_free_chan_resources(&c->vc); in bcm2835_dma_free_chan_resources()
527 free_irq(c->irq_number, c); in bcm2835_dma_free_chan_resources()
528 dma_pool_destroy(c->cb_pool); in bcm2835_dma_free_chan_resources()
530 dev_dbg(c->vc.chan.device->dev, "Freeing DMA channel %u\n", c->ch); in bcm2835_dma_free_chan_resources()
535 return d->size; in bcm2835_dma_desc_size()
543 for (size = i = 0; i < d->frames; i++) { in bcm2835_dma_desc_size_pos()
544 struct bcm2835_dma_cb *control_block = d->cb_list[i].cb; in bcm2835_dma_desc_size_pos()
545 size_t this_size = control_block->length; in bcm2835_dma_desc_size_pos()
546 dma_addr_t dma; in bcm2835_dma_desc_size_pos() local
548 if (d->dir == DMA_DEV_TO_MEM) in bcm2835_dma_desc_size_pos()
549 dma = control_block->dst; in bcm2835_dma_desc_size_pos()
551 dma = control_block->src; in bcm2835_dma_desc_size_pos()
555 else if (addr >= dma && addr < dma + this_size) in bcm2835_dma_desc_size_pos()
556 size += dma + this_size - addr; in bcm2835_dma_desc_size_pos()
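Worked example with hypothetical numbers: a DMA_DEV_TO_MEM descriptor built from two 0x1000-byte control blocks whose destinations are 0x1000 and 0x2000, observed at hardware destination address 0x1800, yields 0x1000 + 0x1000 - 0x1800 = 0x800 bytes left in the current block plus the full 0x1000 of the following one, i.e. a residue of 0x1800 (6144) bytes.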
574 spin_lock_irqsave(&c->vc.lock, flags); in bcm2835_dma_tx_status()
575 vd = vchan_find_desc(&c->vc, cookie); in bcm2835_dma_tx_status()
577 txstate->residue = in bcm2835_dma_tx_status()
578 bcm2835_dma_desc_size(to_bcm2835_dma_desc(&vd->tx)); in bcm2835_dma_tx_status()
579 } else if (c->desc && c->desc->vd.tx.cookie == cookie) { in bcm2835_dma_tx_status()
580 struct bcm2835_desc *d = c->desc; in bcm2835_dma_tx_status()
583 if (d->dir == DMA_MEM_TO_DEV) in bcm2835_dma_tx_status()
584 pos = readl(c->chan_base + BCM2835_DMA_SOURCE_AD); in bcm2835_dma_tx_status()
585 else if (d->dir == DMA_DEV_TO_MEM) in bcm2835_dma_tx_status()
586 pos = readl(c->chan_base + BCM2835_DMA_DEST_AD); in bcm2835_dma_tx_status()
590 txstate->residue = bcm2835_dma_desc_size_pos(d, pos); in bcm2835_dma_tx_status()
592 txstate->residue = 0; in bcm2835_dma_tx_status()
595 spin_unlock_irqrestore(&c->vc.lock, flags); in bcm2835_dma_tx_status()
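From the client side this residue surfaces through the generic dmaengine helpers; a hedged sketch of a caller polling it (granularity is per burst, as advertised by residue_granularity in the probe code below):

#include <linux/dmaengine.h>

static size_t bytes_left(struct dma_chan *chan, dma_cookie_t cookie)
{
        struct dma_tx_state state;

        /* ends up in bcm2835_dma_tx_status() for channels of this driver */
        if (dmaengine_tx_status(chan, cookie, &state) == DMA_COMPLETE)
                return 0;

        return state.residue;
}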
605 spin_lock_irqsave(&c->vc.lock, flags); in bcm2835_dma_issue_pending()
606 if (vchan_issue_pending(&c->vc) && !c->desc) in bcm2835_dma_issue_pending()
609 spin_unlock_irqrestore(&c->vc.lock, flags); in bcm2835_dma_issue_pending()
618 u32 info = BCM2835_DMA_D_INC | BCM2835_DMA_S_INC; in bcm2835_dma_prep_dma_memcpy() local
630 /* allocate the CB chain - this also fills in the pointers */ in bcm2835_dma_prep_dma_memcpy()
632 info, extra, frames, in bcm2835_dma_prep_dma_memcpy()
637 return vchan_tx_prep(&c->vc, &d->vd, flags); in bcm2835_dma_prep_dma_memcpy()
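A hedged sketch of how a client could exercise this memcpy prep callback through the generic dmaengine API; dst and src are assumed to be already-mapped bus addresses and error handling is trimmed:

#include <linux/dmaengine.h>

static dma_cookie_t submit_memcpy(struct dma_chan *chan, dma_addr_t dst,
                                  dma_addr_t src, size_t len)
{
        struct dma_async_tx_descriptor *tx;
        dma_cookie_t cookie;

        tx = dmaengine_prep_dma_memcpy(chan, dst, src, len,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx)
                return -EINVAL;

        cookie = dmaengine_submit(tx);
        dma_async_issue_pending(chan);  /* ends up in bcm2835_dma_issue_pending() */
        return cookie;
}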
649 u32 info = BCM2835_DMA_WAIT_RESP; in bcm2835_dma_prep_slave_sg() local
654 dev_err(chan->device->dev, in bcm2835_dma_prep_slave_sg()
659 if (c->dreq != 0) in bcm2835_dma_prep_slave_sg()
660 info |= BCM2835_DMA_PER_MAP(c->dreq); in bcm2835_dma_prep_slave_sg()
663 if (c->cfg.src_addr_width != DMA_SLAVE_BUSWIDTH_4_BYTES) in bcm2835_dma_prep_slave_sg()
665 src = c->cfg.src_addr; in bcm2835_dma_prep_slave_sg()
666 info |= BCM2835_DMA_S_DREQ | BCM2835_DMA_D_INC; in bcm2835_dma_prep_slave_sg()
668 if (c->cfg.dst_addr_width != DMA_SLAVE_BUSWIDTH_4_BYTES) in bcm2835_dma_prep_slave_sg()
670 dst = c->cfg.dst_addr; in bcm2835_dma_prep_slave_sg()
671 info |= BCM2835_DMA_D_DREQ | BCM2835_DMA_S_INC; in bcm2835_dma_prep_slave_sg()
679 info, extra, in bcm2835_dma_prep_slave_sg()
686 bcm2835_dma_fill_cb_chain_with_sg(chan, direction, d->cb_list, in bcm2835_dma_prep_slave_sg()
689 return vchan_tx_prep(&c->vc, &d->vd, flags); in bcm2835_dma_prep_slave_sg()
697 struct bcm2835_dmadev *od = to_bcm2835_dma_dev(chan->device); in bcm2835_dma_prep_dma_cyclic()
701 u32 info = BCM2835_DMA_WAIT_RESP; in bcm2835_dma_prep_dma_cyclic() local
708 dev_err(chan->device->dev, "%s: bad direction?\n", __func__); in bcm2835_dma_prep_dma_cyclic()
713 dev_err(chan->device->dev, in bcm2835_dma_prep_dma_cyclic()
724 * warn if buf_len is not a multiple of period_len - this may lead in bcm2835_dma_prep_dma_cyclic()
728 dev_warn_once(chan->device->dev, in bcm2835_dma_prep_dma_cyclic()
733 if (c->dreq != 0) in bcm2835_dma_prep_dma_cyclic()
734 info |= BCM2835_DMA_PER_MAP(c->dreq); in bcm2835_dma_prep_dma_cyclic()
737 if (c->cfg.src_addr_width != DMA_SLAVE_BUSWIDTH_4_BYTES) in bcm2835_dma_prep_dma_cyclic()
739 src = c->cfg.src_addr; in bcm2835_dma_prep_dma_cyclic()
741 info |= BCM2835_DMA_S_DREQ | BCM2835_DMA_D_INC; in bcm2835_dma_prep_dma_cyclic()
743 if (c->cfg.dst_addr_width != DMA_SLAVE_BUSWIDTH_4_BYTES) in bcm2835_dma_prep_dma_cyclic()
745 dst = c->cfg.dst_addr; in bcm2835_dma_prep_dma_cyclic()
747 info |= BCM2835_DMA_D_DREQ | BCM2835_DMA_S_INC; in bcm2835_dma_prep_dma_cyclic()
749 /* non-lite channels can write zeroes w/o accessing memory */ in bcm2835_dma_prep_dma_cyclic()
750 if (buf_addr == od->zero_page && !c->is_lite_channel) in bcm2835_dma_prep_dma_cyclic()
751 info |= BCM2835_DMA_S_IGNORE; in bcm2835_dma_prep_dma_cyclic()
766 info, extra, in bcm2835_dma_prep_dma_cyclic()
773 d->cb_list[d->frames - 1].cb->next = d->cb_list[0].paddr; in bcm2835_dma_prep_dma_cyclic()
775 return vchan_tx_prep(&c->vc, &d->vd, flags); in bcm2835_dma_prep_dma_cyclic()
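For the cyclic path, a hedged sketch of a typical client (an audio-style FIFO feeder) under stated assumptions: FIFO_PHYS_ADDR and the "tx" request name are placeholders, and the 4-byte bus width matches the only width this driver accepts (see src_addr_widths/dst_addr_widths in the probe code below):

#include <linux/dmaengine.h>
#include <linux/err.h>

static int start_cyclic_tx(struct device *dev, dma_addr_t buf,
                           size_t buf_len, size_t period_len)
{
        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = FIFO_PHYS_ADDR,       /* placeholder */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
        };
        struct dma_async_tx_descriptor *desc;
        struct dma_chan *chan;

        chan = dma_request_chan(dev, "tx");             /* placeholder name */
        if (IS_ERR(chan))
                return PTR_ERR(chan);

        dmaengine_slave_config(chan, &cfg);

        /* one interrupt per period_len, raised by the control block that
         * closes each period (see the finalextrainfo handling above) */
        desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
                                         DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
        if (!desc) {
                dma_release_channel(chan);
                return -EINVAL;
        }

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
}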
783 c->cfg = *cfg; in bcm2835_dma_slave_config()
794 spin_lock_irqsave(&c->vc.lock, flags); in bcm2835_dma_terminate_all()
796 /* stop DMA activity */ in bcm2835_dma_terminate_all()
797 if (c->desc) { in bcm2835_dma_terminate_all()
798 vchan_terminate_vdesc(&c->desc->vd); in bcm2835_dma_terminate_all()
799 c->desc = NULL; in bcm2835_dma_terminate_all()
803 vchan_get_all_descriptors(&c->vc, &head); in bcm2835_dma_terminate_all()
804 spin_unlock_irqrestore(&c->vc.lock, flags); in bcm2835_dma_terminate_all()
805 vchan_dma_desc_free_list(&c->vc, &head); in bcm2835_dma_terminate_all()
814 vchan_synchronize(&c->vc); in bcm2835_dma_synchronize()
822 c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL); in bcm2835_dma_chan_init()
824 return -ENOMEM; in bcm2835_dma_chan_init()
826 c->vc.desc_free = bcm2835_dma_desc_free; in bcm2835_dma_chan_init()
827 vchan_init(&c->vc, &d->ddev); in bcm2835_dma_chan_init()
829 c->chan_base = BCM2835_DMA_CHANIO(d->base, chan_id); in bcm2835_dma_chan_init()
830 c->ch = chan_id; in bcm2835_dma_chan_init()
831 c->irq_number = irq; in bcm2835_dma_chan_init()
832 c->irq_flags = irq_flags; in bcm2835_dma_chan_init()
835 if (readl(c->chan_base + BCM2835_DMA_DEBUG) & in bcm2835_dma_chan_init()
837 c->is_lite_channel = true; in bcm2835_dma_chan_init()
846 list_for_each_entry_safe(c, next, &od->ddev.channels, in bcm2835_dma_free()
848 list_del(&c->vc.chan.device_node); in bcm2835_dma_free()
849 tasklet_kill(&c->vc.task); in bcm2835_dma_free()
852 dma_unmap_page_attrs(od->ddev.dev, od->zero_page, PAGE_SIZE, in bcm2835_dma_free()
857 { .compatible = "brcm,bcm2835-dma", },
865 struct bcm2835_dmadev *d = ofdma->of_dma_data; in bcm2835_dma_xlate()
868 chan = dma_get_any_slave_channel(&d->ddev); in bcm2835_dma_xlate()
873 to_bcm2835_dma_chan(chan)->dreq = spec->args[0]; in bcm2835_dma_xlate()
883 list_for_each_entry_safe(c, next, &od->ddev.channels, in bcm2835_dma_suspend_late()
885 void __iomem *chan_base = c->chan_base; in bcm2835_dma_suspend_late()
887 /* Check if DMA channel is busy */ in bcm2835_dma_suspend_late()
889 return -EBUSY; in bcm2835_dma_suspend_late()
910 if (!pdev->dev.dma_mask) in bcm2835_dma_probe()
911 pdev->dev.dma_mask = &pdev->dev.coherent_dma_mask; in bcm2835_dma_probe()
913 rc = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32)); in bcm2835_dma_probe()
915 dev_err(&pdev->dev, "Unable to set DMA mask\n"); in bcm2835_dma_probe()
919 od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL); in bcm2835_dma_probe()
921 return -ENOMEM; in bcm2835_dma_probe()
923 dma_set_max_seg_size(&pdev->dev, 0x3FFFFFFF); in bcm2835_dma_probe()
929 od->base = base; in bcm2835_dma_probe()
931 dma_cap_set(DMA_SLAVE, od->ddev.cap_mask); in bcm2835_dma_probe()
932 dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask); in bcm2835_dma_probe()
933 dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask); in bcm2835_dma_probe()
934 dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask); in bcm2835_dma_probe()
935 od->ddev.device_alloc_chan_resources = bcm2835_dma_alloc_chan_resources; in bcm2835_dma_probe()
936 od->ddev.device_free_chan_resources = bcm2835_dma_free_chan_resources; in bcm2835_dma_probe()
937 od->ddev.device_tx_status = bcm2835_dma_tx_status; in bcm2835_dma_probe()
938 od->ddev.device_issue_pending = bcm2835_dma_issue_pending; in bcm2835_dma_probe()
939 od->ddev.device_prep_dma_cyclic = bcm2835_dma_prep_dma_cyclic; in bcm2835_dma_probe()
940 od->ddev.device_prep_slave_sg = bcm2835_dma_prep_slave_sg; in bcm2835_dma_probe()
941 od->ddev.device_prep_dma_memcpy = bcm2835_dma_prep_dma_memcpy; in bcm2835_dma_probe()
942 od->ddev.device_config = bcm2835_dma_slave_config; in bcm2835_dma_probe()
943 od->ddev.device_terminate_all = bcm2835_dma_terminate_all; in bcm2835_dma_probe()
944 od->ddev.device_synchronize = bcm2835_dma_synchronize; in bcm2835_dma_probe()
945 od->ddev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in bcm2835_dma_probe()
946 od->ddev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in bcm2835_dma_probe()
947 od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) | in bcm2835_dma_probe()
949 od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in bcm2835_dma_probe()
950 od->ddev.descriptor_reuse = true; in bcm2835_dma_probe()
951 od->ddev.dev = &pdev->dev; in bcm2835_dma_probe()
952 INIT_LIST_HEAD(&od->ddev.channels); in bcm2835_dma_probe()
956 od->zero_page = dma_map_page_attrs(od->ddev.dev, ZERO_PAGE(0), 0, in bcm2835_dma_probe()
959 if (dma_mapping_error(od->ddev.dev, od->zero_page)) { in bcm2835_dma_probe()
960 dev_err(&pdev->dev, "Failed to map zero page\n"); in bcm2835_dma_probe()
961 return -ENOMEM; in bcm2835_dma_probe()
964 /* Request DMA channel mask from device tree */ in bcm2835_dma_probe()
965 if (of_property_read_u32(pdev->dev.of_node, in bcm2835_dma_probe()
966 "brcm,dma-channel-mask", in bcm2835_dma_probe()
968 dev_err(&pdev->dev, "Failed to get channel mask\n"); in bcm2835_dma_probe()
969 rc = -EINVAL; in bcm2835_dma_probe()
977 irq[i] = -1; in bcm2835_dma_probe()
982 snprintf(chan_name, sizeof(chan_name), "dma%i", i); in bcm2835_dma_probe()
988 dev_warn_once(&pdev->dev, in bcm2835_dma_probe()
989 "missing interrupt-names property in device tree - legacy interpretation is used\n"); in bcm2835_dma_probe()
1017 dev_dbg(&pdev->dev, "Initialized %i DMA channels\n", i); in bcm2835_dma_probe()
1019 /* Device-tree DMA controller registration */ in bcm2835_dma_probe()
1020 rc = of_dma_controller_register(pdev->dev.of_node, in bcm2835_dma_probe()
1023 dev_err(&pdev->dev, "Failed to register DMA controller\n"); in bcm2835_dma_probe()
1027 rc = dma_async_device_register(&od->ddev); in bcm2835_dma_probe()
1029 dev_err(&pdev->dev, in bcm2835_dma_probe()
1030 "Failed to register slave DMA engine device: %d\n", rc); in bcm2835_dma_probe()
1034 dev_dbg(&pdev->dev, "Load BCM2835 DMA engine driver\n"); in bcm2835_dma_probe()
1047 dma_async_device_unregister(&od->ddev); in bcm2835_dma_remove()
1055 .name = "bcm2835-dma",
1063 MODULE_ALIAS("platform:bcm2835-dma");
1064 MODULE_DESCRIPTION("BCM2835 DMA engine driver");