Lines Matching +full:short +full:- +full:descriptor in drivers/dma/hsu/hsu.c (Linux HSU DMA driver)

1 // SPDX-License-Identifier: GPL-2.0-only
23 #include <linux/dma-mapping.h>
28 #include <linux/percpu-defs.h>
54 if (hsuc->direction == DMA_MEM_TO_DEV) in hsu_chan_enable()
56 else if (hsuc->direction == DMA_DEV_TO_MEM) in hsu_chan_enable()
64 struct dma_slave_config *config = &hsuc->config; in hsu_dma_chan_start()
65 struct hsu_dma_desc *desc = hsuc->desc; in hsu_dma_chan_start()
70 if (hsuc->direction == DMA_MEM_TO_DEV) { in hsu_dma_chan_start()
71 bsr = config->dst_maxburst; in hsu_dma_chan_start()
72 mtsr = config->dst_addr_width; in hsu_dma_chan_start()
73 } else if (hsuc->direction == DMA_DEV_TO_MEM) { in hsu_dma_chan_start()
74 bsr = config->src_maxburst; in hsu_dma_chan_start()
75 mtsr = config->src_addr_width; in hsu_dma_chan_start()
85 count = desc->nents - desc->active; in hsu_dma_chan_start()
87 hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr); in hsu_dma_chan_start()
88 hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len); in hsu_dma_chan_start()
94 desc->active++; in hsu_dma_chan_start()
96 /* Only for the last descriptor in the chain */ in hsu_dma_chan_start()
97 dcr |= HSU_CH_DCR_CHSOD(count - 1); in hsu_dma_chan_start()
98 dcr |= HSU_CH_DCR_CHDI(count - 1); in hsu_dma_chan_start()
120 /* Get the next descriptor */ in hsu_dma_start_transfer()
121 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
123 hsuc->desc = NULL; in hsu_dma_start_transfer()
127 list_del(&vdesc->node); in hsu_dma_start_transfer()
128 hsuc->desc = to_hsu_dma_desc(vdesc); in hsu_dma_start_transfer()
130 /* Start the channel with a new descriptor */ in hsu_dma_start_transfer()
135 * hsu_dma_get_status() - get DMA channel status
151 int hsu_dma_get_status(struct hsu_dma_chip *chip, unsigned short nr, in hsu_dma_get_status()
159 if (nr >= chip->hsu->nr_channels) in hsu_dma_get_status()
160 return -EINVAL; in hsu_dma_get_status()
162 hsuc = &chip->hsu->chan[nr]; in hsu_dma_get_status()
168 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
170 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
175 return -EIO; in hsu_dma_get_status()
182 * At this point, at least one of Descriptor Time Out, Channel Error in hsu_dma_get_status()
183 * or Descriptor Done bits must be set. Clear the Descriptor Time Out in hsu_dma_get_status()
184 * bits and if sr is still non-zero, it must be channel error or in hsu_dma_get_status()
185 * descriptor done which are higher priority than timeout and handled in hsu_dma_get_status()
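The comment above carries the key decision in hsu_dma_get_status(): a Descriptor Time Out bit can be set together with Channel Error or Descriptor Done, so the timeout bits are masked off first and only a remaining non-zero value is handed back for hsu_dma_do_irq() to process. A minimal sketch of that decision, assuming the HSU_CH_SR_DESCTO_ANY mask from the driver's header and a u32 *status output argument, neither of which appears in the matched lines:

	/* sr holds the freshly read Channel Status Register value */
	sr &= ~HSU_CH_SR_DESCTO_ANY;	/* drop the Descriptor Time Out bits (mask name assumed) */
	if (!sr)
		return 1;		/* pure timeout: nothing left for hsu_dma_do_irq() */

	*status = sr;			/* Channel Error or Descriptor Done remains */
	return 0;			/* caller should now call hsu_dma_do_irq() */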
197 * hsu_dma_do_irq() - DMA interrupt handler
203 * This function handles Channel Error and Descriptor Done interrupts.
210 int hsu_dma_do_irq(struct hsu_dma_chip *chip, unsigned short nr, u32 status) in hsu_dma_do_irq()
218 if (nr >= chip->hsu->nr_channels) in hsu_dma_do_irq()
221 hsuc = &chip->hsu->chan[nr]; in hsu_dma_do_irq()
222 stat = this_cpu_ptr(hsuc->vchan.chan.local); in hsu_dma_do_irq()
224 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
225 desc = hsuc->desc; in hsu_dma_do_irq()
228 desc->status = DMA_ERROR; in hsu_dma_do_irq()
229 } else if (desc->active < desc->nents) { in hsu_dma_do_irq()
232 vchan_cookie_complete(&desc->vdesc); in hsu_dma_do_irq()
233 desc->status = DMA_COMPLETE; in hsu_dma_do_irq()
234 stat->bytes_transferred += desc->length; in hsu_dma_do_irq()
238 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
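hsu_dma_get_status() and hsu_dma_do_irq() together form the interface a chip glue driver calls from its hardware interrupt handler: query the channel first, then hand a non-timeout status to the core. A hedged sketch of such a handler, loosely following the pattern of the driver's PCI glue; the HSU_PCI_DMAISR bitmap register and the pointer third argument of hsu_dma_get_status() are assumptions not visible in the matched lines:

static irqreturn_t example_hsu_irq(int irq, void *dev)
{
	struct hsu_dma_chip *chip = dev;
	unsigned short i;
	u32 dmaisr, status;
	int ret = 0, err;

	/* One bit per channel reports which channels raised the interrupt (register name assumed) */
	dmaisr = readl(chip->regs + HSU_PCI_DMAISR);
	for (i = 0; i < chip->hsu->nr_channels; i++) {
		if (dmaisr & 0x1) {
			err = hsu_dma_get_status(chip, i, &status);
			if (err > 0)		/* timeout status, already dealt with */
				ret |= 1;
			else if (err == 0)	/* Channel Error or Descriptor Done */
				ret |= hsu_dma_do_irq(chip, i, status);
		}
		dmaisr >>= 1;
	}

	return IRQ_RETVAL(ret);
}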
252 desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT); in hsu_dma_alloc_desc()
253 if (!desc->sg) { in hsu_dma_alloc_desc()
265 kfree(desc->sg); in hsu_dma_desc_free()
284 desc->sg[i].addr = sg_dma_address(sg); in hsu_dma_prep_slave_sg()
285 desc->sg[i].len = sg_dma_len(sg); in hsu_dma_prep_slave_sg()
287 desc->length += sg_dma_len(sg); in hsu_dma_prep_slave_sg()
290 desc->nents = sg_len; in hsu_dma_prep_slave_sg()
291 desc->direction = direction; in hsu_dma_prep_slave_sg()
292 /* desc->active = 0 by kzalloc */ in hsu_dma_prep_slave_sg()
293 desc->status = DMA_IN_PROGRESS; in hsu_dma_prep_slave_sg()
295 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
303 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
304 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
306 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
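hsu_dma_prep_slave_sg(), hsu_dma_issue_pending() and hsu_dma_slave_config() are reached through the generic dmaengine slave API rather than being called directly. A sketch of the usual client sequence, assuming an already requested channel and an illustrative FIFO address; none of the client-side names below come from the matched lines:

#include <linux/dmaengine.h>

/* Hypothetical client: queue an SG list for device-to-memory DMA */
static int example_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
			    unsigned int sg_len, dma_addr_t fifo_addr)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_addr,
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
		.src_maxburst	= 16,
	};
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int ret;

	ret = dmaengine_slave_config(chan, &cfg);	/* ends up in hsu_dma_slave_config() */
	if (ret)
		return ret;

	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT);	/* hsu_dma_prep_slave_sg() */
	if (!desc)
		return -EBUSY;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);			/* hsu_dma_issue_pending() */
	return 0;
}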
311 struct hsu_dma_desc *desc = hsuc->desc; in hsu_dma_active_desc_size()
315 for (i = desc->active; i < desc->nents; i++) in hsu_dma_active_desc_size()
316 bytes += desc->sg[i].len; in hsu_dma_active_desc_size()
318 i = HSU_DMA_CHAN_NR_DESC - 1; in hsu_dma_active_desc_size()
321 } while (--i >= 0); in hsu_dma_active_desc_size()
339 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_tx_status()
340 vdesc = vchan_find_desc(&hsuc->vchan, cookie); in hsu_dma_tx_status()
341 if (hsuc->desc && cookie == hsuc->desc->vdesc.tx.cookie) { in hsu_dma_tx_status()
344 status = hsuc->desc->status; in hsu_dma_tx_status()
346 bytes = to_hsu_dma_desc(vdesc)->length; in hsu_dma_tx_status()
349 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_tx_status()
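On the client side, the residue computed here is read back through dmaengine_tx_status(); since the driver advertises DMA_RESIDUE_GRANULARITY_BURST, the reported residue is updated at burst granularity. A brief illustrative poll, where chan and cookie are assumed to come from earlier dmaengine_prep_slave_sg()/dmaengine_submit() calls:

	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(chan, cookie, &state);
	if (status == DMA_IN_PROGRESS || status == DMA_PAUSED)
		pr_debug("DMA still running, %u bytes not yet transferred\n",
			 state.residue);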
359 memcpy(&hsuc->config, config, sizeof(hsuc->config)); in hsu_dma_slave_config()
369 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_pause()
370 if (hsuc->desc && hsuc->desc->status == DMA_IN_PROGRESS) { in hsu_dma_pause()
372 hsuc->desc->status = DMA_PAUSED; in hsu_dma_pause()
374 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_pause()
384 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_resume()
385 if (hsuc->desc && hsuc->desc->status == DMA_PAUSED) { in hsu_dma_resume()
386 hsuc->desc->status = DMA_IN_PROGRESS; in hsu_dma_resume()
389 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_resume()
400 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_terminate_all()
403 if (hsuc->desc) { in hsu_dma_terminate_all()
404 hsu_dma_desc_free(&hsuc->desc->vdesc); in hsu_dma_terminate_all()
405 hsuc->desc = NULL; in hsu_dma_terminate_all()
408 vchan_get_all_descriptors(&hsuc->vchan, &head); in hsu_dma_terminate_all()
409 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_terminate_all()
410 vchan_dma_desc_free_list(&hsuc->vchan, &head); in hsu_dma_terminate_all()
424 vchan_synchronize(&hsuc->vchan); in hsu_dma_synchronize()
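Pause, resume and termination are likewise driven through the dmaengine wrappers; dmaengine_terminate_sync() pairs device_terminate_all with device_synchronize so the channel's vchan tasklet has finished before client memory is reused. A short illustrative sequence, assuming chan is a previously requested HSU channel:

	dmaengine_pause(chan);			/* reaches hsu_dma_pause() */
	dmaengine_resume(chan);			/* reaches hsu_dma_resume() */

	/* terminate_sync = device_terminate_all followed by device_synchronize */
	dmaengine_terminate_sync(chan);		/* hsu_dma_terminate_all(), then hsu_dma_synchronize() */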
430 void __iomem *addr = chip->regs + chip->offset; in hsu_dma_probe()
431 unsigned short i; in hsu_dma_probe()
434 hsu = devm_kzalloc(chip->dev, sizeof(*hsu), GFP_KERNEL); in hsu_dma_probe()
436 return -ENOMEM; in hsu_dma_probe()
438 chip->hsu = hsu; in hsu_dma_probe()
441 hsu->nr_channels = (chip->length - chip->offset) / HSU_DMA_CHAN_LENGTH; in hsu_dma_probe()
443 hsu->chan = devm_kcalloc(chip->dev, hsu->nr_channels, in hsu_dma_probe()
444 sizeof(*hsu->chan), GFP_KERNEL); in hsu_dma_probe()
445 if (!hsu->chan) in hsu_dma_probe()
446 return -ENOMEM; in hsu_dma_probe()
448 INIT_LIST_HEAD(&hsu->dma.channels); in hsu_dma_probe()
449 for (i = 0; i < hsu->nr_channels; i++) { in hsu_dma_probe()
450 struct hsu_dma_chan *hsuc = &hsu->chan[i]; in hsu_dma_probe()
452 hsuc->vchan.desc_free = hsu_dma_desc_free; in hsu_dma_probe()
453 vchan_init(&hsuc->vchan, &hsu->dma); in hsu_dma_probe()
455 hsuc->direction = (i & 0x1) ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV; in hsu_dma_probe()
456 hsuc->reg = addr + i * HSU_DMA_CHAN_LENGTH; in hsu_dma_probe()
459 dma_cap_set(DMA_SLAVE, hsu->dma.cap_mask); in hsu_dma_probe()
460 dma_cap_set(DMA_PRIVATE, hsu->dma.cap_mask); in hsu_dma_probe()
462 hsu->dma.device_free_chan_resources = hsu_dma_free_chan_resources; in hsu_dma_probe()
464 hsu->dma.device_prep_slave_sg = hsu_dma_prep_slave_sg; in hsu_dma_probe()
466 hsu->dma.device_issue_pending = hsu_dma_issue_pending; in hsu_dma_probe()
467 hsu->dma.device_tx_status = hsu_dma_tx_status; in hsu_dma_probe()
469 hsu->dma.device_config = hsu_dma_slave_config; in hsu_dma_probe()
470 hsu->dma.device_pause = hsu_dma_pause; in hsu_dma_probe()
471 hsu->dma.device_resume = hsu_dma_resume; in hsu_dma_probe()
472 hsu->dma.device_terminate_all = hsu_dma_terminate_all; in hsu_dma_probe()
473 hsu->dma.device_synchronize = hsu_dma_synchronize; in hsu_dma_probe()
475 hsu->dma.src_addr_widths = HSU_DMA_BUSWIDTHS; in hsu_dma_probe()
476 hsu->dma.dst_addr_widths = HSU_DMA_BUSWIDTHS; in hsu_dma_probe()
477 hsu->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in hsu_dma_probe()
478 hsu->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in hsu_dma_probe()
480 hsu->dma.dev = chip->dev; in hsu_dma_probe()
482 dma_set_max_seg_size(hsu->dma.dev, HSU_CH_DxTSR_MASK); in hsu_dma_probe()
484 ret = dma_async_device_register(&hsu->dma); in hsu_dma_probe()
488 dev_info(chip->dev, "Found HSU DMA, %d channels\n", hsu->nr_channels); in hsu_dma_probe()
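hsu_dma_probe() expects the caller to have mapped the register space and filled struct hsu_dma_chip; the channel count is then derived from (length - offset) / HSU_DMA_CHAN_LENGTH as seen above. A sketch of a glue driver's side of that contract; the 0x100 channel offset, the irq field and the parameter types are assumptions based on typical glue code, not on the matched lines:

static int example_glue_probe(struct device *dev, void __iomem *regs,
			      unsigned int length, int irq)
{
	struct hsu_dma_chip *chip;

	chip = devm_kzalloc(dev, sizeof(*chip), GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	chip->dev = dev;
	chip->regs = regs;			/* whole HSU DMA register block */
	chip->length = length;			/* size of that block */
	chip->offset = 0x100;			/* offset of the first channel (assumed value) */
	chip->irq = irq;			/* field assumed; used by the glue's ISR */

	return hsu_dma_probe(chip);		/* sizes hsu->nr_channels from length/offset */
}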
495 struct hsu_dma *hsu = chip->hsu; in hsu_dma_remove()
496 unsigned short i; in hsu_dma_remove()
498 dma_async_device_unregister(&hsu->dma); in hsu_dma_remove()
500 for (i = 0; i < hsu->nr_channels; i++) { in hsu_dma_remove()
501 struct hsu_dma_chan *hsuc = &hsu->chan[i]; in hsu_dma_remove()
503 tasklet_kill(&hsuc->vchan.task); in hsu_dma_remove()