
// SPDX-License-Identifier: GPL-2.0
/*
 * sl3516-ce-core.c - hardware cryptographic offloader for Storlink SL3516 SoC
 */

#include <linux/dma-mapping.h>
#include "sl3516-ce.h"
/* in sl3516_ce_desc_init() */
	ce->tx = dma_alloc_coherent(ce->dev, sz, &ce->dtx, GFP_KERNEL);
	if (!ce->tx)
		return -ENOMEM;
	ce->rx = dma_alloc_coherent(ce->dev, sz, &ce->drx, GFP_KERNEL);
	if (!ce->rx)
		/* ... error path elided ... */

	for (i = 0; i < MAXDESC; i++) {
		ce->tx[i].frame_ctrl.bits.own = CE_CPU;
		ce->tx[i].next_desc.next_descriptor = ce->dtx + (i + 1) * sizeof(struct descriptor);
	}
	ce->tx[MAXDESC - 1].next_desc.next_descriptor = ce->dtx;

	for (i = 0; i < MAXDESC; i++) {
		ce->rx[i].frame_ctrl.bits.own = CE_CPU;
		ce->rx[i].next_desc.next_descriptor = ce->drx + (i + 1) * sizeof(struct descriptor);
	}
	ce->rx[MAXDESC - 1].next_desc.next_descriptor = ce->drx;

	ce->pctrl = dma_alloc_coherent(ce->dev, sizeof(struct pkt_control_ecb),
				       &ce->dctrl, GFP_KERNEL);
	if (!ce->pctrl)
		/* ... error path elided ... */

	/* error unwind: free the RX ring, then the TX ring */
	dma_free_coherent(ce->dev, sz, ce->rx, ce->drx);
	dma_free_coherent(ce->dev, sz, ce->tx, ce->dtx);
	return -ENOMEM;
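/*
 * Illustrative sketch, not part of the driver: the ring built above is a
 * classic circular DMA descriptor ring - every slot's next_descriptor field
 * holds the bus address of the following slot, and the last slot points back
 * to the first.  "demo_desc" and "demo_ring_init" are hypothetical names;
 * only dma_alloc_coherent() (from <linux/dma-mapping.h>, included above) and
 * the wrap-around linking mirror what sl3516_ce_desc_init() does.
 */
struct demo_desc {
	u32 frame_ctrl;
	u32 flag_status;
	u32 buf_adr;
	u32 next_descriptor;	/* bus address of the next slot */
};

static int demo_ring_init(struct device *dev, struct demo_desc **ring,
			  dma_addr_t *ring_dma, int ndesc)
{
	int i;

	*ring = dma_alloc_coherent(dev, sizeof(**ring) * ndesc, ring_dma,
				   GFP_KERNEL);
	if (!*ring)
		return -ENOMEM;

	/* link slot i to the bus address of slot i + 1 ... */
	for (i = 0; i < ndesc; i++)
		(*ring)[i].next_descriptor = *ring_dma + (i + 1) * sizeof(struct demo_desc);
	/* ... and close the ring by pointing the last slot back at the first */
	(*ring)[ndesc - 1].next_descriptor = *ring_dma;

	return 0;
}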
/* in sl3516_ce_free_descs() */
	dma_free_coherent(ce->dev, sz, ce->tx, ce->dtx);
	dma_free_coherent(ce->dev, sz, ce->rx, ce->drx);
	dma_free_coherent(ce->dev, sizeof(struct pkt_control_ecb), ce->pctrl,
			  ce->dctrl);
/* in start_dma_tx() */
	writel(v, ce->base + IPSEC_TXDMA_CTRL);

/* in start_dma_rx() */
	writel(v, ce->base + IPSEC_RXDMA_CTRL);
/* in get_desc_tx() */
	dd = &ce->tx[ce->ctx];
	ce->ctx++;
	if (ce->ctx >= MAXDESC)
		ce->ctx = 0;
/* in get_desc_rx() */
	rdd = &ce->rx[ce->crx];
	ce->crx++;
	if (ce->crx >= MAXDESC)
		ce->crx = 0;
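/*
 * Illustrative sketch, not part of the driver: typical use of the cursor
 * helpers above - take the next slot, fill it, then hand it to the engine by
 * flipping the ownership bit.  "demo_queue_src_buf" is a hypothetical helper,
 * and it assumes get_desc_tx() returns the descriptor it selects.
 */
static void demo_queue_src_buf(struct sl3516_ce_dev *ce, u32 busaddr, u32 len)
{
	struct descriptor *dd = get_desc_tx(ce);

	dd->buf_adr = busaddr;
	dd->frame_ctrl.bits.buffer_size = len;
	dd->frame_ctrl.bits.own = CE_DMA;	/* slot now belongs to the engine */
}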
/* in sl3516_ce_run_task() */
	ce->stat_req++;

	reinit_completion(&ce->complete);
	ce->status = 0;

	for (i = 0; i < rctx->nr_sgd; i++) {
		dev_dbg(ce->dev, "%s handle DST SG %d/%d len=%d\n", __func__,
			i, rctx->nr_sgd, rctx->t_dst[i].len);
		/* ... */
		rdd->buf_adr = rctx->t_dst[i].addr;
		rdd->frame_ctrl.bits.buffer_size = rctx->t_dst[i].len;
		rdd->frame_ctrl.bits.own = CE_DMA;
	}
	rdd->next_desc.bits.eofie = 1;

	for (i = 0; i < rctx->nr_sgs; i++) {
		dev_dbg(ce->dev, "%s handle SRC SG %d/%d len=%d\n", __func__,
			i, rctx->nr_sgs, rctx->t_src[i].len);
		rctx->h->algorithm_len = rctx->t_src[i].len;
		/* ... */
		dd->frame_ctrl.raw = 0;
		dd->flag_status.raw = 0;
		dd->frame_ctrl.bits.buffer_size = rctx->pctrllen;
		dd->buf_adr = ce->dctrl;
		dd->flag_status.tx_flag.tqflag = rctx->tqflag;
		dd->next_desc.bits.eofie = 0;
		dd->next_desc.bits.dec = 0;
		dd->next_desc.bits.sof_eof = DESC_FIRST | DESC_LAST;
		dd->frame_ctrl.bits.own = CE_DMA;
		/* ... */
		dd->frame_ctrl.raw = 0;
		dd->flag_status.raw = 0;
		dd->frame_ctrl.bits.buffer_size = rctx->t_src[i].len;
		dd->buf_adr = rctx->t_src[i].addr;
		dd->flag_status.tx_flag.tqflag = 0;
		dd->next_desc.bits.eofie = 0;
		dd->next_desc.bits.dec = 0;
		dd->next_desc.bits.sof_eof = DESC_FIRST | DESC_LAST;
		dd->frame_ctrl.bits.own = CE_DMA;
	}
	/* ... */
	wait_for_completion_interruptible_timeout(&ce->complete,
						  /* ... */);
	if (ce->status == 0) {
		dev_err(ce->dev, "DMA timeout for %s\n", name);
		err = -EFAULT;
	}
	v = readl(ce->base + IPSEC_STATUS_REG);
	/* ... */
		dev_err(ce->dev, "IPSEC_STATUS_REG %x\n", v);
		err = -EFAULT;
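/*
 * Illustrative sketch, not part of the driver: the synchronisation used
 * above.  The requester re-arms ce->complete and clears ce->status, kicks the
 * DMA, then sleeps with a timeout; the IRQ handler later sets ce->status and
 * calls complete().  "demo_wait_for_dma" is hypothetical and the error code
 * choices are assumptions.
 */
static int demo_wait_for_dma(struct sl3516_ce_dev *ce)
{
	long remaining;

	reinit_completion(&ce->complete);
	ce->status = 0;

	/* ... program the rings and start the TX/RX DMA here ... */

	remaining = wait_for_completion_interruptible_timeout(&ce->complete,
							      msecs_to_jiffies(5000));
	if (remaining <= 0 || ce->status == 0)
		return -EFAULT;	/* interrupted or timed out before the RX IRQ fired */

	return 0;
}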
/* in ce_irq_handler() */
	ce->stat_irq++;

	v = readl(ce->base + IPSEC_DMA_STATUS);
	writel(v, ce->base + IPSEC_DMA_STATUS);

	/* per-bit error checks on v elided in this excerpt */
		dev_err(ce->dev, "AHB bus Error While Tx !!!\n");
		dev_err(ce->dev, "Tx Descriptor Protocol Error !!!\n");
		dev_err(ce->dev, "AHB bus Error While Rx !!!\n");
		dev_err(ce->dev, "Rx Descriptor Protocol Error !!!\n");

	/* TX completion path */
		ce->stat_irq_tx++;

	/* RX completion path: the request is done, wake the waiter */
		ce->status = 1;
		complete(&ce->complete);
		ce->stat_irq_rx++;
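/*
 * Illustrative sketch, not part of the driver: the overall shape of a handler
 * like ce_irq_handler() - read the DMA status, write the value back to
 * acknowledge it, then wake the waiter.  DEMO_RX_DONE is a made-up
 * placeholder; the real bit layout lives in sl3516-ce.h.  Requires
 * <linux/interrupt.h>, <linux/io.h> and <linux/bits.h>.
 */
#define DEMO_RX_DONE	BIT(0)	/* placeholder bit, not the real one */

static irqreturn_t demo_irq_handler(int irq, void *data)
{
	struct sl3516_ce_dev *ce = data;
	u32 v;

	v = readl(ce->base + IPSEC_DMA_STATUS);
	writel(v, ce->base + IPSEC_DMA_STATUS);	/* ack what we have seen */

	if (!v)
		return IRQ_NONE;	/* spurious / shared-line interrupt */

	if (v & DEMO_RX_DONE) {
		ce->status = 1;
		complete(&ce->complete);
	}

	return IRQ_HANDLED;
}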
/* in the driver's algorithm template */
	.cra_driver_name = "ecb-aes-sl3516",
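/*
 * Illustrative sketch, not the driver's actual table entry: roughly what an
 * skcipher_alg carrying the driver name above looks like.  The "demo_aes_*"
 * callbacks, the priority and the flags are assumptions; only the
 * cra_name/cra_driver_name pairing is taken from the line above.  Requires
 * <crypto/skcipher.h> and <crypto/aes.h>.
 */
static int demo_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen);
static int demo_aes_encrypt(struct skcipher_request *req);
static int demo_aes_decrypt(struct skcipher_request *req);

static struct skcipher_alg demo_ecb_aes_alg = {
	.base = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "ecb-aes-sl3516",
		.cra_priority		= 400,		/* assumed */
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= demo_aes_setkey,
	.encrypt	= demo_aes_encrypt,
	.decrypt	= demo_aes_decrypt,
};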
/* in sl3516_ce_debugfs_show() */
	struct sl3516_ce_dev *ce = seq->private;

	/* ... */
		   ce->hwrng_stat_req, ce->hwrng_stat_bytes);
	seq_printf(seq, "IRQ %lu\n", ce->stat_irq);
	seq_printf(seq, "IRQ TX %lu\n", ce->stat_irq_tx);
	seq_printf(seq, "IRQ RX %lu\n", ce->stat_irq_rx);
	seq_printf(seq, "nreq %lu\n", ce->stat_req);
	seq_printf(seq, "fallback SG count TX %lu\n", ce->fallback_sg_count_tx);
	seq_printf(seq, "fallback SG count RX %lu\n", ce->fallback_sg_count_rx);
	seq_printf(seq, "fallback modulo16 %lu\n", ce->fallback_mod16);
	seq_printf(seq, "fallback align16 %lu\n", ce->fallback_align16);
	seq_printf(seq, "fallback not same len %lu\n", ce->fallback_not_same_len);
/* in sl3516_ce_register_algs() */
		dev_info(ce->dev, "DEBUG: Register %s\n",
			 /* ... */);
		/* ... */
			dev_err(ce->dev, "Fail to register %s\n",
				/* ... */);
		/* ... */
		dev_err(ce->dev, "ERROR: tried to register an unknown algo\n");

/* in sl3516_ce_unregister_algs() */
		dev_info(ce->dev, "Unregister %d %s\n", i,
			 /* ... */);
/* in sl3516_ce_start() */
	ce->ctx = 0;
	ce->crx = 0;
	writel(ce->dtx, ce->base + IPSEC_TXDMA_CURR_DESC);
	writel(ce->drx, ce->base + IPSEC_RXDMA_CURR_DESC);
	writel(0, ce->base + IPSEC_DMA_STATUS);
/* in sl3516_ce_pm_suspend() */
	reset_control_assert(ce->reset);
	clk_disable_unprepare(ce->clks);

/* in sl3516_ce_pm_resume() */
	err = clk_prepare_enable(ce->clks);
	if (err) {
		dev_err(ce->dev, "Cannot prepare_enable\n");
		/* ... */
	}
	err = reset_control_deassert(ce->reset);
	if (err) {
		dev_err(ce->dev, "Cannot deassert reset control\n");
		/* ... */
	}
/* in sl3516_ce_pm_init() */
	pm_runtime_use_autosuspend(ce->dev);
	pm_runtime_set_autosuspend_delay(ce->dev, 2000);

	err = pm_runtime_set_suspended(ce->dev);
	/* ... */
	pm_runtime_enable(ce->dev);

/* in sl3516_ce_pm_exit() */
	pm_runtime_disable(ce->dev);
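/*
 * Illustrative sketch, not part of the driver: with autosuspend set up as
 * above, a request path usually brackets hardware access like this.
 * "demo_do_one_request" is a hypothetical caller.  Requires
 * <linux/pm_runtime.h>.
 */
static int demo_do_one_request(struct sl3516_ce_dev *ce)
{
	int err;

	err = pm_runtime_resume_and_get(ce->dev);
	if (err < 0)
		return err;

	/* ... touch ce->base registers, run the DMA job ... */

	pm_runtime_mark_last_busy(ce->dev);
	pm_runtime_put_autosuspend(ce->dev);
	return 0;
}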
/* in sl3516_ce_probe() */
	ce = devm_kzalloc(&pdev->dev, sizeof(*ce), GFP_KERNEL);
	if (!ce)
		return -ENOMEM;

	ce->dev = &pdev->dev;
	/* ... */
	ce->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ce->base))
		return PTR_ERR(ce->base);
	/* ... */
	err = devm_request_irq(&pdev->dev, irq, ce_irq_handler, 0, "crypto", ce);
	if (err) {
		dev_err(ce->dev, "Cannot request Crypto Engine IRQ (err=%d)\n", err);
		/* ... */
	}

	ce->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ce->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ce->reset),
				     /* ... */);
	ce->clks = devm_clk_get(ce->dev, NULL);
	if (IS_ERR(ce->clks)) {
		err = PTR_ERR(ce->clks);
		dev_err(ce->dev, "Cannot get clock err=%d\n", err);
		/* ... */
	}
	/* ... */
	init_completion(&ce->complete);

	ce->engine = crypto_engine_alloc_init(ce->dev, true);
	if (!ce->engine) {
		dev_err(ce->dev, "Cannot allocate engine\n");
		err = -ENOMEM;
		/* ... */
	}

	err = crypto_engine_start(ce->engine);
	if (err) {
		dev_err(ce->dev, "Cannot start engine\n");
		/* ... */
	}
	/* ... */
	err = pm_runtime_resume_and_get(ce->dev);
	/* ... */
	v = readl(ce->base + IPSEC_ID);
	dev_info(ce->dev, "SL3516 dev %lx rev %lx\n",
		 /* ... */);
	v = readl(ce->base + IPSEC_DMA_DEVICE_ID);
	dev_info(ce->dev, "SL3516 DMA dev %lx rev %lx\n",
		 /* ... */);
	pm_runtime_put_sync(ce->dev);
	/* ... */
	ce->dbgfs_dir = dbgfs_dir;
	ce->dbgfs_stats = dbgfs_stats;
	/* ... */

	/* error unwind */
	crypto_engine_exit(ce->engine);
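/*
 * Illustrative sketch, not shown in this excerpt: how requests typically flow
 * through the crypto_engine allocated in probe.  The encrypt entry point
 * queues the request on the engine; the engine later calls the driver's
 * do_one_request callback, which runs the job and finalizes the request.  All
 * "demo_" names and the tfm context layout are hypothetical.  Requires
 * <crypto/engine.h> and <crypto/skcipher.h>.
 */
struct demo_tfm_ctx {
	struct sl3516_ce_dev *ce;	/* back-pointer set at tfm init (assumed) */
};

static int demo_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct demo_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* hand the request to the engine thread */
	return crypto_transfer_skcipher_request_to_engine(ctx->ce->engine, req);
}

static int demo_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	int err = 0;

	/* ... map the SGs and run the DMA job, e.g. via sl3516_ce_run_task() ... */

	crypto_finalize_skcipher_request(engine, req, err);
	return 0;
}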
/* in sl3516_ce_remove() */
	crypto_engine_exit(ce->engine);
	/* ... */
	debugfs_remove_recursive(ce->dbgfs_dir);
/* OF match table entry */
	{ .compatible = "cortina,sl3516-crypto"},

/* in the platform_driver definition */
		.name = "sl3516-crypto",