Lines matching "ast2600-acry"
1 // SPDX-License-Identifier: GPL-2.0+
11 #include <linux/dma-mapping.h>
25 dev_info((d)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
28 dev_dbg((d)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
33 * ACRY register definitions *
36 #define ASPEED_ACRY_TRIGGER 0x000 /* ACRY Engine Control: trigger */
37 #define ASPEED_ACRY_DMA_CMD 0x048 /* ACRY Engine Control: Command */
38 #define ASPEED_ACRY_DMA_SRC_BASE 0x04C /* ACRY DRAM base address for DMA */
39 #define ASPEED_ACRY_DMA_LEN 0x050 /* ACRY Data Length of DMA */
40 #define ASPEED_ACRY_RSA_KEY_LEN 0x058 /* ACRY RSA Exp/Mod Key Length (Bits) */
41 #define ASPEED_ACRY_INT_MASK 0x3F8 /* ACRY Interrupt Mask */
42 #define ASPEED_ACRY_STATUS 0x3FC /* ACRY Interrupt Status */
57 /* acry isr */
61 #define ASPEED_ACRY_SRAM_MAX_LEN 2048 /* ACRY SRAM maximum length (Bytes) */
62 #define ASPEED_ACRY_RSA_MAX_KEY_LEN 512 /* ACRY RSA maximum key length (Bytes) */
75 #define ast_acry_write(acry, val, offset) \
76 writel((val), (acry)->regs + (offset))
78 #define ast_acry_read(acry, offset) \
79 readl((acry)->regs + (offset))
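For orientation, here is a minimal sketch of how these accessors combine with the register map above to arm a DMA-driven RSA operation. It is illustrative only: the helper name is made up, and the command/trigger values written at the end are placeholders, since the driver's real command and trigger bit definitions are not part of this excerpt.

/* Illustrative sketch only -- example_acry_start_rsa_dma() is not part of the
 * driver, and the 0x1 command/trigger values are assumed placeholders.
 */
static void example_acry_start_rsa_dma(struct aspeed_acry_dev *acry_dev,
				       dma_addr_t src, u32 len, u32 key_len_bits)
{
	/* Point the engine at the DMA buffer holding the data and exp/mod keys. */
	ast_acry_write(acry_dev, src, ASPEED_ACRY_DMA_SRC_BASE);
	ast_acry_write(acry_dev, len, ASPEED_ACRY_DMA_LEN);

	/* Exp/Mod key length register is in bits; the exact encoding is assumed. */
	ast_acry_write(acry_dev, key_len_bits, ASPEED_ACRY_RSA_KEY_LEN);

	/* Kick off the operation: command and trigger bit values are placeholders. */
	ast_acry_write(acry_dev, 0x1, ASPEED_ACRY_DMA_CMD);
	ast_acry_write(acry_dev, 0x1, ASPEED_ACRY_TRIGGER);
}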
97 /* ACRY output SRAM buffer */
100 /* ACRY input DMA buffer */
106 /* ACRY SRAM memory mapped */
152 akcipher_request_set_tfm(req, ctx->fallback_tfm); in aspeed_acry_do_fallback()
154 if (ctx->enc) in aspeed_acry_do_fallback()
169 return ctx->key.n_sz > ASPEED_ACRY_RSA_MAX_KEY_LEN; in aspeed_acry_need_fallback()
180 return crypto_transfer_akcipher_request_to_engine(acry_dev->crypt_engine_rsa, req); in aspeed_acry_handle_queue()
188 struct aspeed_acry_dev *acry_dev = ctx->acry_dev; in aspeed_acry_do_request()
190 acry_dev->req = req; in aspeed_acry_do_request()
191 acry_dev->flags |= CRYPTO_FLAGS_BUSY; in aspeed_acry_do_request()
193 return ctx->trigger(acry_dev); in aspeed_acry_do_request()
198 struct akcipher_request *req = acry_dev->req; in aspeed_acry_complete()
200 acry_dev->flags &= ~CRYPTO_FLAGS_BUSY; in aspeed_acry_complete()
202 crypto_finalize_akcipher_request(acry_dev->crypt_engine_rsa, req, err); in aspeed_acry_complete()
222 for (j = nbytes - 1; j >= 0; j--) { in aspeed_acry_rsa_sg_copy_to_buffer()
223 data_idx = acry_dev->data_byte_mapping[i]; in aspeed_acry_rsa_sg_copy_to_buffer()
229 data_idx = acry_dev->data_byte_mapping[i]; in aspeed_acry_rsa_sg_copy_to_buffer()
238 * - mode 0 : Exponent
239 * - mode 1 : Modulus
242 * - DRAM memory layout:
244 * - ACRY SRAM memory layout should reverse the order of source data:
260 return -ENOMEM; in aspeed_acry_rsa_ctx_copy()
265 nbytes--; in aspeed_acry_rsa_ctx_copy()
270 nbits -= count_leading_zeros(src[0]) - (BITS_PER_LONG - 8); in aspeed_acry_rsa_ctx_copy()
272 /* double-word alignment */ in aspeed_acry_rsa_ctx_copy()
276 i = BYTES_PER_DWORD - nbytes % BYTES_PER_DWORD; in aspeed_acry_rsa_ctx_copy()
279 for (j = ndw; j > 0; j--) { in aspeed_acry_rsa_ctx_copy()
288 idx = acry_dev->exp_dw_mapping[j - 1]; in aspeed_acry_rsa_ctx_copy()
290 idx = acry_dev->mod_dw_mapping[j - 1]; in aspeed_acry_rsa_ctx_copy()
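The net effect of the copy above is easiest to see on a tiny input: the big-endian key bytes are consumed from the least significant end, packed into 32-bit words, and each word lands in the SRAM slot chosen by the exp/mod mapping table. The standalone program below is a hypothetical illustration of that reversal/packing step only (names and layout are mine, not the driver's); it does not reproduce the engine's exact SRAM placement.

/* Hypothetical, self-contained illustration of the byte-reversal packing. */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BYTES_PER_U32	4

/* Pack a big-endian byte string into 32-bit words, least significant word
 * first, so word 0 holds the last four input bytes.
 */
static void pack_reversed(const uint8_t *src, size_t nbytes, uint32_t *dst)
{
	size_t ndw = (nbytes + BYTES_PER_U32 - 1) / BYTES_PER_U32;

	memset(dst, 0, ndw * sizeof(*dst));
	for (size_t i = 0; i < nbytes; i++) {
		uint8_t b = src[nbytes - 1 - i];	/* walk from the LSB end */

		dst[i / BYTES_PER_U32] |= (uint32_t)b << (8 * (i % BYTES_PER_U32));
	}
}

int main(void)
{
	const uint8_t key[] = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab };
	uint32_t words[2];

	pack_reversed(key, sizeof(key), words);
	/* Prints "0x456789ab 0x00000123": word 0 holds the least significant bytes. */
	printf("0x%08" PRIx32 " 0x%08" PRIx32 "\n", words[0], words[1]);
	return 0;
}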
301 struct akcipher_request *req = acry_dev->req; in aspeed_acry_rsa_transfer()
302 u8 __iomem *sram_buffer = acry_dev->acry_sram; in aspeed_acry_rsa_transfer()
303 struct scatterlist *out_sg = req->dst; in aspeed_acry_rsa_transfer()
313 /* Disable ACRY SRAM protection */ in aspeed_acry_rsa_transfer()
314 regmap_update_bits(acry_dev->ahbc, AHBC_REGION_PROT, in aspeed_acry_rsa_transfer()
319 for (j = ASPEED_ACRY_SRAM_MAX_LEN - 1; j >= 0; j--) { in aspeed_acry_rsa_transfer()
320 data_idx = acry_dev->data_byte_mapping[j]; in aspeed_acry_rsa_transfer()
322 result_nbytes--; in aspeed_acry_rsa_transfer()
330 ACRY_DBG(acry_dev, "result_nbytes:%d, req->dst_len:%d\n", in aspeed_acry_rsa_transfer()
331 result_nbytes, req->dst_len); in aspeed_acry_rsa_transfer()
333 if (result_nbytes <= req->dst_len) { in aspeed_acry_rsa_transfer()
336 req->dst_len = result_nbytes; in aspeed_acry_rsa_transfer()
339 dev_err(acry_dev->dev, "RSA engine error!\n"); in aspeed_acry_rsa_transfer()
342 memzero_explicit(acry_dev->buf_addr, ASPEED_ACRY_BUFF_SIZE); in aspeed_acry_rsa_transfer()
349 struct akcipher_request *req = acry_dev->req; in aspeed_acry_rsa_trigger()
354 if (!ctx->n || !ctx->n_sz) { in aspeed_acry_rsa_trigger()
355 dev_err(acry_dev->dev, "%s: key n is not set\n", __func__); in aspeed_acry_rsa_trigger()
356 return -EINVAL; in aspeed_acry_rsa_trigger()
359 memzero_explicit(acry_dev->buf_addr, ASPEED_ACRY_BUFF_SIZE); in aspeed_acry_rsa_trigger()
362 aspeed_acry_rsa_sg_copy_to_buffer(acry_dev, acry_dev->buf_addr, in aspeed_acry_rsa_trigger()
363 req->src, req->src_len); in aspeed_acry_rsa_trigger()
365 nm = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr, ctx->n, in aspeed_acry_rsa_trigger()
366 ctx->n_sz, ASPEED_RSA_MOD_MODE); in aspeed_acry_rsa_trigger()
367 if (ctx->enc) { in aspeed_acry_rsa_trigger()
368 if (!ctx->e || !ctx->e_sz) { in aspeed_acry_rsa_trigger()
369 dev_err(acry_dev->dev, "%s: key e is not set\n", in aspeed_acry_rsa_trigger()
371 return -EINVAL; in aspeed_acry_rsa_trigger()
374 ne = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr, in aspeed_acry_rsa_trigger()
375 ctx->e, ctx->e_sz, in aspeed_acry_rsa_trigger()
378 if (!ctx->d || !ctx->d_sz) { in aspeed_acry_rsa_trigger()
379 dev_err(acry_dev->dev, "%s: key d is not set\n", in aspeed_acry_rsa_trigger()
381 return -EINVAL; in aspeed_acry_rsa_trigger()
384 ne = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr, in aspeed_acry_rsa_trigger()
385 ctx->key.d, ctx->key.d_sz, in aspeed_acry_rsa_trigger()
389 ast_acry_write(acry_dev, acry_dev->buf_dma_addr, in aspeed_acry_rsa_trigger()
396 acry_dev->resume = aspeed_acry_rsa_transfer; in aspeed_acry_rsa_trigger()
398 /* Enable ACRY SRAM protection */ in aspeed_acry_rsa_trigger()
399 regmap_update_bits(acry_dev->ahbc, AHBC_REGION_PROT, in aspeed_acry_rsa_trigger()
417 struct aspeed_acry_dev *acry_dev = ctx->acry_dev; in aspeed_acry_rsa_enc()
419 ctx->trigger = aspeed_acry_rsa_trigger; in aspeed_acry_rsa_enc()
420 ctx->enc = 1; in aspeed_acry_rsa_enc()
429 struct aspeed_acry_dev *acry_dev = ctx->acry_dev; in aspeed_acry_rsa_dec()
431 ctx->trigger = aspeed_acry_rsa_trigger; in aspeed_acry_rsa_dec()
432 ctx->enc = 0; in aspeed_acry_rsa_dec()
445 ctx->n_sz = len; in aspeed_rsa_set_n()
446 ctx->n = aspeed_rsa_key_copy(value, len); in aspeed_rsa_set_n()
447 if (!ctx->n) in aspeed_rsa_set_n()
448 return -ENOMEM; in aspeed_rsa_set_n()
456 ctx->e_sz = len; in aspeed_rsa_set_e()
457 ctx->e = aspeed_rsa_key_copy(value, len); in aspeed_rsa_set_e()
458 if (!ctx->e) in aspeed_rsa_set_e()
459 return -ENOMEM; in aspeed_rsa_set_e()
467 ctx->d_sz = len; in aspeed_rsa_set_d()
468 ctx->d = aspeed_rsa_key_copy(value, len); in aspeed_rsa_set_d()
469 if (!ctx->d) in aspeed_rsa_set_d()
470 return -ENOMEM; in aspeed_rsa_set_d()
477 kfree_sensitive(ctx->n); in aspeed_rsa_key_free()
478 kfree_sensitive(ctx->e); in aspeed_rsa_key_free()
479 kfree_sensitive(ctx->d); in aspeed_rsa_key_free()
480 ctx->n_sz = 0; in aspeed_rsa_key_free()
481 ctx->e_sz = 0; in aspeed_rsa_key_free()
482 ctx->d_sz = 0; in aspeed_rsa_key_free()
489 struct aspeed_acry_dev *acry_dev = ctx->acry_dev; in aspeed_acry_rsa_setkey()
493 ret = rsa_parse_priv_key(&ctx->key, key, keylen); in aspeed_acry_rsa_setkey()
495 ret = rsa_parse_pub_key(&ctx->key, key, keylen); in aspeed_acry_rsa_setkey()
498 dev_err(acry_dev->dev, "rsa parse key failed, ret:0x%x\n", in aspeed_acry_rsa_setkey()
506 if (ctx->key.n_sz > ASPEED_ACRY_RSA_MAX_KEY_LEN) in aspeed_acry_rsa_setkey()
509 ret = aspeed_rsa_set_n(ctx, (u8 *)ctx->key.n, ctx->key.n_sz); in aspeed_acry_rsa_setkey()
513 ret = aspeed_rsa_set_e(ctx, (u8 *)ctx->key.e, ctx->key.e_sz); in aspeed_acry_rsa_setkey()
518 ret = aspeed_rsa_set_d(ctx, (u8 *)ctx->key.d, ctx->key.d_sz); in aspeed_acry_rsa_setkey()
526 dev_err(acry_dev->dev, "rsa set key failed\n"); in aspeed_acry_rsa_setkey()
539 ret = crypto_akcipher_set_pub_key(ctx->fallback_tfm, key, keylen); in aspeed_acry_rsa_set_pub_key()
553 ret = crypto_akcipher_set_priv_key(ctx->fallback_tfm, key, keylen); in aspeed_acry_rsa_set_priv_key()
564 if (ctx->key.n_sz > ASPEED_ACRY_RSA_MAX_KEY_LEN) in aspeed_acry_rsa_max_size()
565 return crypto_akcipher_maxsize(ctx->fallback_tfm); in aspeed_acry_rsa_max_size()
567 return ctx->n_sz; in aspeed_acry_rsa_max_size()
574 const char *name = crypto_tfm_alg_name(&tfm->base); in aspeed_acry_rsa_init_tfm()
579 ctx->acry_dev = acry_alg->acry_dev; in aspeed_acry_rsa_init_tfm()
581 ctx->fallback_tfm = crypto_alloc_akcipher(name, 0, CRYPTO_ALG_ASYNC | in aspeed_acry_rsa_init_tfm()
583 if (IS_ERR(ctx->fallback_tfm)) { in aspeed_acry_rsa_init_tfm()
584 dev_err(ctx->acry_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n", in aspeed_acry_rsa_init_tfm()
585 name, PTR_ERR(ctx->fallback_tfm)); in aspeed_acry_rsa_init_tfm()
586 return PTR_ERR(ctx->fallback_tfm); in aspeed_acry_rsa_init_tfm()
596 crypto_free_akcipher(ctx->fallback_tfm); in aspeed_acry_rsa_exit_tfm()
611 .cra_driver_name = "aspeed-rsa",
649 /* ACRY interrupt service routine. */
664 if (acry_dev->flags & CRYPTO_FLAGS_BUSY) in aspeed_acry_irq()
665 tasklet_schedule(&acry_dev->done_task); in aspeed_acry_irq()
667 dev_err(acry_dev->dev, "RSA no active requests.\n"); in aspeed_acry_irq()
674 * ACRY SRAM has its own memory layout.
682 acry_dev->exp_dw_mapping[i] = j; in aspeed_acry_sram_mapping()
683 acry_dev->mod_dw_mapping[i] = j + 4; in aspeed_acry_sram_mapping()
684 acry_dev->data_byte_mapping[(i * 4)] = (j + 8) * 4; in aspeed_acry_sram_mapping()
685 acry_dev->data_byte_mapping[(i * 4) + 1] = (j + 8) * 4 + 1; in aspeed_acry_sram_mapping()
686 acry_dev->data_byte_mapping[(i * 4) + 2] = (j + 8) * 4 + 2; in aspeed_acry_sram_mapping()
687 acry_dev->data_byte_mapping[(i * 4) + 3] = (j + 8) * 4 + 3; in aspeed_acry_sram_mapping()
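Only the loop body is visible in this excerpt, so the loop bounds and the rule for advancing j are unknown here. What the visible assignments do show is the relative placement within one iteration: the exponent dword goes to word slot j, the modulus dword to word slot j + 4, and the corresponding four data bytes to byte offsets (j + 8) * 4 through (j + 8) * 4 + 3. The standalone sketch below prints that relationship for a few indices; the simple j++ advance is an assumption for illustration, not the driver's actual rule.

/* Hypothetical illustration of one mapping step; the j advance rule is assumed. */
#include <stdio.h>

static void show_one_mapping(int i, int j)
{
	printf("dword %d: exp word %d, mod word %d, data bytes %d..%d\n",
	       i, j, j + 4, (j + 8) * 4, (j + 8) * 4 + 3);
}

int main(void)
{
	for (int i = 0, j = 0; i < 4; i++, j++)	/* j++ is an assumed placeholder */
		show_one_mapping(i, j);
	return 0;
}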
697 (void)acry_dev->resume(acry_dev); in aspeed_acry_done_task()
701 { .compatible = "aspeed,ast2600-acry", },
708 struct device *dev = &pdev->dev; in aspeed_acry_probe()
714 return -ENOMEM; in aspeed_acry_probe()
716 acry_dev->dev = dev; in aspeed_acry_probe()
720 acry_dev->regs = devm_platform_ioremap_resource(pdev, 0); in aspeed_acry_probe()
721 if (IS_ERR(acry_dev->regs)) in aspeed_acry_probe()
722 return PTR_ERR(acry_dev->regs); in aspeed_acry_probe()
724 acry_dev->acry_sram = devm_platform_ioremap_resource(pdev, 1); in aspeed_acry_probe()
725 if (IS_ERR(acry_dev->acry_sram)) in aspeed_acry_probe()
726 return PTR_ERR(acry_dev->acry_sram); in aspeed_acry_probe()
729 acry_dev->irq = platform_get_irq(pdev, 0); in aspeed_acry_probe()
730 if (acry_dev->irq < 0) in aspeed_acry_probe()
731 return -ENXIO; in aspeed_acry_probe()
733 rc = devm_request_irq(dev, acry_dev->irq, aspeed_acry_irq, 0, in aspeed_acry_probe()
740 acry_dev->clk = devm_clk_get_enabled(dev, NULL); in aspeed_acry_probe()
741 if (IS_ERR(acry_dev->clk)) { in aspeed_acry_probe()
742 dev_err(dev, "Failed to get acry clk\n"); in aspeed_acry_probe()
743 return PTR_ERR(acry_dev->clk); in aspeed_acry_probe()
746 acry_dev->ahbc = syscon_regmap_lookup_by_phandle(dev->of_node, in aspeed_acry_probe()
748 if (IS_ERR(acry_dev->ahbc)) { in aspeed_acry_probe()
750 return -ENODEV; in aspeed_acry_probe()
754 acry_dev->crypt_engine_rsa = crypto_engine_alloc_init(dev, true); in aspeed_acry_probe()
755 if (!acry_dev->crypt_engine_rsa) { in aspeed_acry_probe()
756 rc = -ENOMEM; in aspeed_acry_probe()
760 rc = crypto_engine_start(acry_dev->crypt_engine_rsa); in aspeed_acry_probe()
764 tasklet_init(&acry_dev->done_task, aspeed_acry_done_task, in aspeed_acry_probe()
770 /* Initialize ACRY SRAM index */ in aspeed_acry_probe()
773 acry_dev->buf_addr = dmam_alloc_coherent(dev, ASPEED_ACRY_BUFF_SIZE, in aspeed_acry_probe()
774 &acry_dev->buf_dma_addr, in aspeed_acry_probe()
776 if (!acry_dev->buf_addr) { in aspeed_acry_probe()
777 rc = -ENOMEM; in aspeed_acry_probe()
783 dev_info(dev, "Aspeed ACRY Accelerator successfully registered\n"); in aspeed_acry_probe()
788 crypto_engine_exit(acry_dev->crypt_engine_rsa); in aspeed_acry_probe()
790 clk_disable_unprepare(acry_dev->clk); in aspeed_acry_probe()
800 crypto_engine_exit(acry_dev->crypt_engine_rsa); in aspeed_acry_remove()
801 tasklet_kill(&acry_dev->done_task); in aspeed_acry_remove()
802 clk_disable_unprepare(acry_dev->clk); in aspeed_acry_remove()
819 MODULE_DESCRIPTION("ASPEED ACRY driver for hardware RSA Engine");