Lines Matching +full:aes +full:- +full:cmac
1 // SPDX-License-Identifier: GPL-2.0-only
2 // SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
8 #include <linux/dma-mapping.h>
14 #include <crypto/aes.h>
23 #include "tegra-se.h"
93 /* increment counter (128-bit int) */
97 --bits; in ctr_iv_inc()
109 offset = req->cryptlen - ctx->ivsize; in tegra_cbc_iv_copyback()
111 if (rctx->encrypt) in tegra_cbc_iv_copyback()
112 memcpy(req->iv, rctx->datbuf.buf + offset, ctx->ivsize); in tegra_cbc_iv_copyback()
114 scatterwalk_map_and_copy(req->iv, req->src, offset, ctx->ivsize, 0); in tegra_cbc_iv_copyback()
121 if (ctx->alg == SE_ALG_CBC) { in tegra_aes_update_iv()
123 } else if (ctx->alg == SE_ALG_CTR) { in tegra_aes_update_iv()
124 num = req->cryptlen / ctx->ivsize; in tegra_aes_update_iv()
125 if (req->cryptlen % ctx->ivsize) in tegra_aes_update_iv()
128 ctr_iv_inc(req->iv, ctx->ivsize, num); in tegra_aes_update_iv()
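Note: tegra_aes_update_iv() advances the CTR IV by the number of AES blocks consumed, rounding a partial block up, and ctr_iv_inc() treats the IV as a 128-bit big-endian integer. A minimal, self-contained sketch of that counter arithmetic (function names here are illustrative, not the driver's):

#include <stdint.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

/* add nblocks to the big-endian 128-bit integer held in iv[] */
static void ctr_iv_add(uint8_t iv[16], uint64_t nblocks)
{
	for (int i = 15; i >= 0 && nblocks; i--) {
		uint64_t sum = iv[i] + (nblocks & 0xff);

		iv[i] = (uint8_t)sum;
		nblocks = (nblocks >> 8) + (sum >> 8);  /* carry into next byte */
	}
}

static void aes_ctr_update_iv(uint8_t iv[16], size_t cryptlen)
{
	size_t nblocks = cryptlen / AES_BLOCK_SIZE;

	if (cryptlen % AES_BLOCK_SIZE)  /* a partial block still consumes a counter */
		nblocks++;

	ctr_iv_add(iv, nblocks);
}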
165 return -EINVAL; in tegra234_aes_crypto_cfg()
205 return -EINVAL; in tegra234_aes_cfg()
212 struct tegra_se *se = ctx->se; in tegra_aes_prep_cmd()
213 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_aes_prep_cmd()
214 dma_addr_t addr = rctx->datbuf.addr; in tegra_aes_prep_cmd()
216 data_count = rctx->len / AES_BLOCK_SIZE; in tegra_aes_prep_cmd()
217 res_bits = (rctx->len % AES_BLOCK_SIZE) * 8; in tegra_aes_prep_cmd()
224 data_count--; in tegra_aes_prep_cmd()
226 if (rctx->iv) { in tegra_aes_prep_cmd()
228 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_aes_prep_cmd()
230 cpuvaddr[i++] = rctx->iv[j]; in tegra_aes_prep_cmd()
233 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_aes_prep_cmd()
237 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_aes_prep_cmd()
238 cpuvaddr[i++] = rctx->config; in tegra_aes_prep_cmd()
239 cpuvaddr[i++] = rctx->crypto_config; in tegra_aes_prep_cmd()
243 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(addr)) | SE_ADDR_HI_SZ(rctx->len); in tegra_aes_prep_cmd()
248 SE_ADDR_HI_SZ(rctx->len); in tegra_aes_prep_cmd()
250 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_aes_prep_cmd()
256 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_aes_prep_cmd()
258 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config); in tegra_aes_prep_cmd()
268 struct tegra_se *se = ctx->se; in tegra_aes_do_one_req()
272 rctx->iv = (ctx->alg == SE_ALG_ECB) ? NULL : (u32 *)req->iv; in tegra_aes_do_one_req()
273 rctx->len = req->cryptlen; in tegra_aes_do_one_req()
274 key1_id = ctx->key1_id; in tegra_aes_do_one_req()
275 key2_id = ctx->key2_id; in tegra_aes_do_one_req()
277 /* Pad input to AES Block size */ in tegra_aes_do_one_req()
278 if (ctx->alg != SE_ALG_XTS) { in tegra_aes_do_one_req()
279 if (rctx->len % AES_BLOCK_SIZE) in tegra_aes_do_one_req()
280 rctx->len += AES_BLOCK_SIZE - (rctx->len % AES_BLOCK_SIZE); in tegra_aes_do_one_req()
283 rctx->datbuf.size = rctx->len; in tegra_aes_do_one_req()
284 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size, in tegra_aes_do_one_req()
285 &rctx->datbuf.addr, GFP_KERNEL); in tegra_aes_do_one_req()
286 if (!rctx->datbuf.buf) { in tegra_aes_do_one_req()
287 ret = -ENOMEM; in tegra_aes_do_one_req()
291 scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0); in tegra_aes_do_one_req()
293 rctx->config = tegra234_aes_cfg(ctx->alg, rctx->encrypt); in tegra_aes_do_one_req()
294 rctx->crypto_config = tegra234_aes_crypto_cfg(ctx->alg, rctx->encrypt); in tegra_aes_do_one_req()
297 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key1, in tegra_aes_do_one_req()
298 ctx->keylen, ctx->alg, &key1_id); in tegra_aes_do_one_req()
303 rctx->crypto_config |= SE_AES_KEY_INDEX(key1_id); in tegra_aes_do_one_req()
305 if (ctx->alg == SE_ALG_XTS) { in tegra_aes_do_one_req()
307 ret = tegra_key_submit_reserved_xts(ctx->se, ctx->key2, in tegra_aes_do_one_req()
308 ctx->keylen, ctx->alg, &key2_id); in tegra_aes_do_one_req()
313 rctx->crypto_config |= SE_AES_KEY2_INDEX(key2_id); in tegra_aes_do_one_req()
318 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_aes_do_one_req()
322 scatterwalk_map_and_copy(rctx->datbuf.buf, req->dst, 0, req->cryptlen, 1); in tegra_aes_do_one_req()
326 dma_free_coherent(ctx->se->dev, rctx->datbuf.size, in tegra_aes_do_one_req()
327 rctx->datbuf.buf, rctx->datbuf.addr); in tegra_aes_do_one_req()
330 tegra_key_invalidate_reserved(ctx->se, key1_id, ctx->alg); in tegra_aes_do_one_req()
333 tegra_key_invalidate_reserved(ctx->se, key2_id, ctx->alg); in tegra_aes_do_one_req()
336 crypto_finalize_skcipher_request(se->engine, req, ret); in tegra_aes_do_one_req()
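Note: the fragment above pads non-XTS request lengths up to a whole AES block before sizing the DMA bounce buffer. A one-function sketch of that round-up (illustrative name, not the driver's helper):

#include <stddef.h>

#define AES_BLOCK_SIZE 16

/* round a byte count up to the next multiple of the AES block size */
static size_t aes_pad_to_block(size_t len)
{
	size_t rem = len % AES_BLOCK_SIZE;

	return rem ? len + (AES_BLOCK_SIZE - rem) : len;
}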
353 ctx->ivsize = crypto_skcipher_ivsize(tfm); in tegra_aes_cra_init()
354 ctx->se = se_alg->se_dev; in tegra_aes_cra_init()
355 ctx->key1_id = 0; in tegra_aes_cra_init()
356 ctx->key2_id = 0; in tegra_aes_cra_init()
357 ctx->keylen = 0; in tegra_aes_cra_init()
359 algname = crypto_tfm_alg_name(&tfm->base); in tegra_aes_cra_init()
362 dev_err(ctx->se->dev, "invalid algorithm\n"); in tegra_aes_cra_init()
366 ctx->alg = ret; in tegra_aes_cra_init()
373 struct tegra_aes_ctx *ctx = crypto_tfm_ctx(&tfm->base); in tegra_aes_cra_exit()
375 if (ctx->key1_id) in tegra_aes_cra_exit()
376 tegra_key_invalidate(ctx->se, ctx->key1_id, ctx->alg); in tegra_aes_cra_exit()
378 if (ctx->key2_id) in tegra_aes_cra_exit()
379 tegra_key_invalidate(ctx->se, ctx->key2_id, ctx->alg); in tegra_aes_cra_exit()
389 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_aes_setkey()
390 return -EINVAL; in tegra_aes_setkey()
393 ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key1_id); in tegra_aes_setkey()
395 ctx->keylen = keylen; in tegra_aes_setkey()
396 memcpy(ctx->key1, key, keylen); in tegra_aes_setkey()
411 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_xts_setkey()
412 return -EINVAL; in tegra_xts_setkey()
415 ret = tegra_key_submit(ctx->se, key, len, in tegra_xts_setkey()
416 ctx->alg, &ctx->key1_id); in tegra_xts_setkey()
418 ctx->keylen = len; in tegra_xts_setkey()
419 memcpy(ctx->key1, key, len); in tegra_xts_setkey()
422 ret = tegra_key_submit(ctx->se, key + len, len, in tegra_xts_setkey()
423 ctx->alg, &ctx->key2_id); in tegra_xts_setkey()
425 ctx->keylen = len; in tegra_xts_setkey()
426 memcpy(ctx->key2, key + len, len); in tegra_xts_setkey()
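Note: an XTS key is two equal-length AES keys concatenated, and the setkey fragment above programs each half into its own keyslot. A sketch of the split, with illustrative names and the common 2x128/2x256-bit key sizes assumed:

#include <stddef.h>
#include <string.h>

struct xts_keys {
	unsigned char key1[32];   /* data (cipher) key half */
	unsigned char key2[32];   /* tweak key half         */
	size_t half_len;          /* 16 for AES-128, 32 for AES-256 */
};

static int xts_split_key(const unsigned char *key, size_t keylen,
			 struct xts_keys *out)
{
	if (keylen != 32 && keylen != 64)   /* only 2x128-bit or 2x256-bit keys */
		return -1;

	out->half_len = keylen / 2;
	memcpy(out->key1, key, out->half_len);
	memcpy(out->key2, key + out->half_len, out->half_len);
	return 0;
}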
457 return -EINVAL; in tegra_aes_kac_manifest()
471 return -EINVAL; in tegra_aes_kac_manifest()
488 if (ctx->alg != SE_ALG_XTS) { in tegra_aes_crypt()
489 if (!IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(tfm))) { in tegra_aes_crypt()
490 dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen); in tegra_aes_crypt()
491 return -EINVAL; in tegra_aes_crypt()
493 } else if (req->cryptlen < XTS_BLOCK_SIZE) { in tegra_aes_crypt()
494 dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen); in tegra_aes_crypt()
495 return -EINVAL; in tegra_aes_crypt()
498 if (!req->cryptlen) in tegra_aes_crypt()
501 rctx->encrypt = encrypt; in tegra_aes_crypt()
503 return crypto_transfer_skcipher_request_to_engine(ctx->se->engine, req); in tegra_aes_crypt()
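Note: tegra_aes_crypt() rejects request lengths that do not fit the mode: CBC/ECB/CTR must be a multiple of the cipher block size, XTS needs at least one full block, and zero-length requests complete early. A simplified sketch of those checks (illustrative name, not the driver's function):

#include <stddef.h>
#include <stdbool.h>
#include <errno.h>

#define AES_BLOCK_SIZE 16
#define XTS_BLOCK_SIZE 16

static int aes_check_req_len(bool is_xts, size_t cryptlen)
{
	if (!is_xts) {
		if (cryptlen % AES_BLOCK_SIZE)
			return -EINVAL;           /* CBC/ECB/CTR: block aligned */
	} else if (cryptlen < XTS_BLOCK_SIZE) {
		return -EINVAL;                   /* XTS: at least one full block */
	}

	return 0;  /* zero-length requests are handled by an early return in the caller */
}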
529 .cra_name = "cbc(aes)",
530 .cra_driver_name = "cbc-aes-tegra",
550 .cra_name = "ecb(aes)",
551 .cra_driver_name = "ecb-aes-tegra",
572 .cra_name = "ctr(aes)",
573 .cra_driver_name = "ctr-aes-tegra",
594 .cra_name = "xts(aes)",
595 .cra_driver_name = "xts-aes-tegra",
599 .cra_alignmask = (__alignof__(u64) - 1),
610 struct tegra_se *se = ctx->se; in tegra_gmac_prep_cmd()
611 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_gmac_prep_cmd()
613 data_count = (rctx->assoclen / AES_BLOCK_SIZE); in tegra_gmac_prep_cmd()
614 res_bits = (rctx->assoclen % AES_BLOCK_SIZE) * 8; in tegra_gmac_prep_cmd()
621 data_count--; in tegra_gmac_prep_cmd()
623 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_gmac_prep_cmd()
627 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 4); in tegra_gmac_prep_cmd()
628 cpuvaddr[i++] = rctx->config; in tegra_gmac_prep_cmd()
629 cpuvaddr[i++] = rctx->crypto_config; in tegra_gmac_prep_cmd()
630 cpuvaddr[i++] = lower_32_bits(rctx->inbuf.addr); in tegra_gmac_prep_cmd()
631 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->inbuf.addr)) | in tegra_gmac_prep_cmd()
632 SE_ADDR_HI_SZ(rctx->assoclen); in tegra_gmac_prep_cmd()
634 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_gmac_prep_cmd()
641 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_gmac_prep_cmd()
650 struct tegra_se *se = ctx->se; in tegra_gcm_crypt_prep_cmd()
651 u32 *cpuvaddr = se->cmdbuf->addr, op; in tegra_gcm_crypt_prep_cmd()
653 data_count = (rctx->cryptlen / AES_BLOCK_SIZE); in tegra_gcm_crypt_prep_cmd()
654 res_bits = (rctx->cryptlen % AES_BLOCK_SIZE) * 8; in tegra_gcm_crypt_prep_cmd()
662 if (!rctx->assoclen) in tegra_gcm_crypt_prep_cmd()
670 data_count--; in tegra_gcm_crypt_prep_cmd()
673 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_gcm_crypt_prep_cmd()
675 cpuvaddr[i++] = rctx->iv[j]; in tegra_gcm_crypt_prep_cmd()
677 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_gcm_crypt_prep_cmd()
681 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_gcm_crypt_prep_cmd()
682 cpuvaddr[i++] = rctx->config; in tegra_gcm_crypt_prep_cmd()
683 cpuvaddr[i++] = rctx->crypto_config; in tegra_gcm_crypt_prep_cmd()
686 cpuvaddr[i++] = lower_32_bits(rctx->inbuf.addr); in tegra_gcm_crypt_prep_cmd()
687 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->inbuf.addr)) | in tegra_gcm_crypt_prep_cmd()
688 SE_ADDR_HI_SZ(rctx->cryptlen); in tegra_gcm_crypt_prep_cmd()
691 cpuvaddr[i++] = lower_32_bits(rctx->outbuf.addr); in tegra_gcm_crypt_prep_cmd()
692 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->outbuf.addr)) | in tegra_gcm_crypt_prep_cmd()
693 SE_ADDR_HI_SZ(rctx->cryptlen); in tegra_gcm_crypt_prep_cmd()
695 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_gcm_crypt_prep_cmd()
700 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_gcm_crypt_prep_cmd()
702 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config); in tegra_gcm_crypt_prep_cmd()
718 if (!rctx->assoclen && !rctx->cryptlen) in tegra_gcm_prep_final_cmd()
721 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->aad_len, 2); in tegra_gcm_prep_final_cmd()
722 cpuvaddr[i++] = rctx->assoclen * 8; in tegra_gcm_prep_final_cmd()
725 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->cryp_msg_len, 2); in tegra_gcm_prep_final_cmd()
726 cpuvaddr[i++] = rctx->cryptlen * 8; in tegra_gcm_prep_final_cmd()
730 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_gcm_prep_final_cmd()
732 cpuvaddr[i++] = rctx->iv[j]; in tegra_gcm_prep_final_cmd()
734 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_gcm_prep_final_cmd()
735 cpuvaddr[i++] = rctx->config; in tegra_gcm_prep_final_cmd()
736 cpuvaddr[i++] = rctx->crypto_config; in tegra_gcm_prep_final_cmd()
741 cpuvaddr[i++] = lower_32_bits(rctx->outbuf.addr); in tegra_gcm_prep_final_cmd()
742 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->outbuf.addr)) | in tegra_gcm_prep_final_cmd()
743 SE_ADDR_HI_SZ(0x10); /* HW always generates 128-bit tag */ in tegra_gcm_prep_final_cmd()
745 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_gcm_prep_final_cmd()
750 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_gcm_prep_final_cmd()
752 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config); in tegra_gcm_prep_final_cmd()
759 struct tegra_se *se = ctx->se; in tegra_gcm_do_gmac()
762 scatterwalk_map_and_copy(rctx->inbuf.buf, in tegra_gcm_do_gmac()
763 rctx->src_sg, 0, rctx->assoclen, 0); in tegra_gcm_do_gmac()
765 rctx->config = tegra234_aes_cfg(SE_ALG_GMAC, rctx->encrypt); in tegra_gcm_do_gmac()
766 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_GMAC, rctx->encrypt) | in tegra_gcm_do_gmac()
767 SE_AES_KEY_INDEX(rctx->key_id); in tegra_gcm_do_gmac()
771 return tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_gcm_do_gmac()
776 struct tegra_se *se = ctx->se; in tegra_gcm_do_crypt()
779 scatterwalk_map_and_copy(rctx->inbuf.buf, rctx->src_sg, in tegra_gcm_do_crypt()
780 rctx->assoclen, rctx->cryptlen, 0); in tegra_gcm_do_crypt()
782 rctx->config = tegra234_aes_cfg(SE_ALG_GCM, rctx->encrypt); in tegra_gcm_do_crypt()
783 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_GCM, rctx->encrypt) | in tegra_gcm_do_crypt()
784 SE_AES_KEY_INDEX(rctx->key_id); in tegra_gcm_do_crypt()
788 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_gcm_do_crypt()
793 scatterwalk_map_and_copy(rctx->outbuf.buf, rctx->dst_sg, in tegra_gcm_do_crypt()
794 rctx->assoclen, rctx->cryptlen, 1); in tegra_gcm_do_crypt()
801 struct tegra_se *se = ctx->se; in tegra_gcm_do_final()
802 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_gcm_do_final()
805 rctx->config = tegra234_aes_cfg(SE_ALG_GCM_FINAL, rctx->encrypt); in tegra_gcm_do_final()
806 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_GCM_FINAL, rctx->encrypt) | in tegra_gcm_do_final()
807 SE_AES_KEY_INDEX(rctx->key_id); in tegra_gcm_do_final()
811 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_gcm_do_final()
815 if (rctx->encrypt) { in tegra_gcm_do_final()
817 offset = rctx->assoclen + rctx->cryptlen; in tegra_gcm_do_final()
818 scatterwalk_map_and_copy(rctx->outbuf.buf, rctx->dst_sg, in tegra_gcm_do_final()
819 offset, rctx->authsize, 1); in tegra_gcm_do_final()
830 offset = rctx->assoclen + rctx->cryptlen; in tegra_gcm_do_verify()
831 scatterwalk_map_and_copy(mac, rctx->src_sg, offset, rctx->authsize, 0); in tegra_gcm_do_verify()
833 if (crypto_memneq(rctx->outbuf.buf, mac, rctx->authsize)) in tegra_gcm_do_verify()
834 return -EBADMSG; in tegra_gcm_do_verify()
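Note: tag verification above uses crypto_memneq() so the comparison takes the same time no matter where the first mismatching byte sits. A minimal sketch of such a constant-time compare (not the kernel's implementation):

#include <stddef.h>

static int tag_memneq(const unsigned char *a, const unsigned char *b, size_t n)
{
	unsigned char diff = 0;

	for (size_t i = 0; i < n; i++)
		diff |= a[i] ^ b[i];    /* accumulate differences without branching */

	return diff != 0;               /* 0 when equal, non-zero when different */
}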
841 /* iv[0] gives value of q-1 in tegra_ccm_check_iv()
842 * 2 <= q <= 8 as per NIST 800-38C notation in tegra_ccm_check_iv()
847 return -EINVAL; in tegra_ccm_check_iv()
857 struct tegra_se *se = ctx->se; in tegra_cbcmac_prep_cmd()
858 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_cbcmac_prep_cmd()
860 data_count = (rctx->inbuf.size / AES_BLOCK_SIZE) - 1; in tegra_cbcmac_prep_cmd()
862 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_cbcmac_prep_cmd()
865 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_cbcmac_prep_cmd()
866 cpuvaddr[i++] = rctx->config; in tegra_cbcmac_prep_cmd()
867 cpuvaddr[i++] = rctx->crypto_config; in tegra_cbcmac_prep_cmd()
869 cpuvaddr[i++] = lower_32_bits(rctx->inbuf.addr); in tegra_cbcmac_prep_cmd()
870 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->inbuf.addr)) | in tegra_cbcmac_prep_cmd()
871 SE_ADDR_HI_SZ(rctx->inbuf.size); in tegra_cbcmac_prep_cmd()
873 cpuvaddr[i++] = lower_32_bits(rctx->outbuf.addr); in tegra_cbcmac_prep_cmd()
874 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->outbuf.addr)) | in tegra_cbcmac_prep_cmd()
877 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_cbcmac_prep_cmd()
883 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_cbcmac_prep_cmd()
892 struct tegra_se *se = ctx->se; in tegra_ctr_prep_cmd()
893 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_ctr_prep_cmd()
896 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_ctr_prep_cmd()
898 cpuvaddr[i++] = rctx->iv[j]; in tegra_ctr_prep_cmd()
900 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_ctr_prep_cmd()
901 cpuvaddr[i++] = (rctx->inbuf.size / AES_BLOCK_SIZE) - 1; in tegra_ctr_prep_cmd()
902 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_ctr_prep_cmd()
903 cpuvaddr[i++] = rctx->config; in tegra_ctr_prep_cmd()
904 cpuvaddr[i++] = rctx->crypto_config; in tegra_ctr_prep_cmd()
907 cpuvaddr[i++] = lower_32_bits(rctx->inbuf.addr); in tegra_ctr_prep_cmd()
908 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->inbuf.addr)) | in tegra_ctr_prep_cmd()
909 SE_ADDR_HI_SZ(rctx->inbuf.size); in tegra_ctr_prep_cmd()
912 cpuvaddr[i++] = lower_32_bits(rctx->outbuf.addr); in tegra_ctr_prep_cmd()
913 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->outbuf.addr)) | in tegra_ctr_prep_cmd()
914 SE_ADDR_HI_SZ(rctx->inbuf.size); in tegra_ctr_prep_cmd()
916 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_ctr_prep_cmd()
922 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_ctr_prep_cmd()
924 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", in tegra_ctr_prep_cmd()
925 rctx->config, rctx->crypto_config); in tegra_ctr_prep_cmd()
932 struct tegra_se *se = ctx->se; in tegra_ccm_do_cbcmac()
935 rctx->config = tegra234_aes_cfg(SE_ALG_CBC_MAC, rctx->encrypt); in tegra_ccm_do_cbcmac()
936 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_CBC_MAC, in tegra_ccm_do_cbcmac()
937 rctx->encrypt) | in tegra_ccm_do_cbcmac()
938 SE_AES_KEY_INDEX(rctx->key_id); in tegra_ccm_do_cbcmac()
943 return tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_ccm_do_cbcmac()
956 return -EOVERFLOW; in tegra_ccm_set_msg_len()
959 memcpy(block - csize, (u8 *)&data + 4 - csize, csize); in tegra_ccm_set_msg_len()
967 u8 *q_ptr, *iv = (u8 *)rctx->iv; in tegra_ccm_format_nonce()
969 memcpy(nonce, rctx->iv, 16); in tegra_ccm_format_nonce()
974 t = rctx->authsize; in tegra_ccm_format_nonce()
975 nonce[0] |= (((t - 2) / 2) << 3); in tegra_ccm_format_nonce()
978 if (rctx->assoclen) in tegra_ccm_format_nonce()
981 /*** Encode Q - message length ***/ in tegra_ccm_format_nonce()
983 q_ptr = nonce + 16 - q; in tegra_ccm_format_nonce()
985 return tegra_ccm_set_msg_len(q_ptr, rctx->cryptlen, q); in tegra_ccm_format_nonce()
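Note: tegra_ccm_format_nonce()/tegra_ccm_set_msg_len() assemble CCM's B0 block per NIST SP 800-38C / RFC 3610: a flags byte encoding Adata, (t-2)/2 and q-1, then the nonce, then the message length as a q-byte big-endian field. A self-contained sketch of that layout (helper name is illustrative):

#include <stdint.h>
#include <stddef.h>
#include <string.h>

static int ccm_format_b0(uint8_t b0[16], const uint8_t *nonce, unsigned int q,
			 unsigned int taglen, size_t msglen, int have_adata)
{
	if (q < 2 || q > 8)                        /* 2 <= q <= 8 per the spec   */
		return -1;

	b0[0]  = (uint8_t)(q - 1);                 /* [q-1] in bits 0..2         */
	b0[0] |= (uint8_t)(((taglen - 2) / 2) << 3); /* [(t-2)/2] in bits 3..5, t even, 4..16 */
	if (have_adata)
		b0[0] |= 0x40;                     /* Adata flag, bit 6          */

	memcpy(b0 + 1, nonce, 15 - q);             /* nonce N occupies 15-q bytes */

	for (unsigned int i = 0; i < q; i++) {     /* Q: big-endian, q bytes      */
		b0[15 - i] = (uint8_t)(msglen & 0xff);
		msglen >>= 8;
	}
	return msglen ? -1 : 0;                    /* message too long for q bytes */
}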
993 * RFC 3610 and NIST Special Publication 800-38C in tegra_ccm_format_adata()
1009 unsigned int padlen = 16 - (len % 16); in tegra_ccm_add_padding()
1030 memcpy(rctx->inbuf.buf, nonce, 16); in tegra_ccm_format_blocks()
1033 if (rctx->assoclen) { in tegra_ccm_format_blocks()
1034 alen = tegra_ccm_format_adata(adata, rctx->assoclen); in tegra_ccm_format_blocks()
1035 memcpy(rctx->inbuf.buf + offset, adata, alen); in tegra_ccm_format_blocks()
1038 scatterwalk_map_and_copy(rctx->inbuf.buf + offset, in tegra_ccm_format_blocks()
1039 rctx->src_sg, 0, rctx->assoclen, 0); in tegra_ccm_format_blocks()
1041 offset += rctx->assoclen; in tegra_ccm_format_blocks()
1042 offset += tegra_ccm_add_padding(rctx->inbuf.buf + offset, in tegra_ccm_format_blocks()
1043 rctx->assoclen + alen); in tegra_ccm_format_blocks()
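Note: the associated-data header referenced above follows RFC 3610: lengths below 2^16 - 2^8 use a 2-byte big-endian field, larger ones a 0xff 0xfe escape plus a 4-byte field, and each section of the MAC input is zero-padded to a 16-byte boundary. A sketch under those assumptions (names are illustrative):

#include <stdint.h>
#include <string.h>

static unsigned int ccm_format_adata_len(uint8_t *out, uint32_t alen)
{
	if (alen < 0xff00) {                  /* short form: 2-byte length */
		out[0] = (uint8_t)(alen >> 8);
		out[1] = (uint8_t)alen;
		return 2;
	}
	out[0] = 0xff;                        /* escape for a 32-bit length */
	out[1] = 0xfe;
	out[2] = (uint8_t)(alen >> 24);
	out[3] = (uint8_t)(alen >> 16);
	out[4] = (uint8_t)(alen >> 8);
	out[5] = (uint8_t)alen;
	return 6;
}

static unsigned int ccm_pad16(uint8_t *buf, unsigned int len)
{
	unsigned int pad = (16 - (len % 16)) % 16;  /* zero padding to a block */

	memset(buf, 0, pad);
	return pad;
}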
1056 result[i] = readl(se->base + se->hw->regs->result + (i * 4)); in tegra_ccm_mac_result()
1059 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_ccm_mac_result()
1061 if (rctx->encrypt) { in tegra_ccm_mac_result()
1062 memcpy(rctx->authdata, result, rctx->authsize); in tegra_ccm_mac_result()
1064 ret = crypto_memneq(rctx->authdata, result, rctx->authsize); in tegra_ccm_mac_result()
1066 return -EBADMSG; in tegra_ccm_mac_result()
1075 scatterwalk_map_and_copy(rctx->outbuf.buf + 16, rctx->dst_sg, in tegra_ccm_ctr_result()
1076 rctx->assoclen, rctx->cryptlen, 1); in tegra_ccm_ctr_result()
1078 if (rctx->encrypt) in tegra_ccm_ctr_result()
1079 scatterwalk_map_and_copy(rctx->outbuf.buf, rctx->dst_sg, in tegra_ccm_ctr_result()
1080 rctx->assoclen + rctx->cryptlen, in tegra_ccm_ctr_result()
1081 rctx->authsize, 1); in tegra_ccm_ctr_result()
1083 memcpy(rctx->authdata, rctx->outbuf.buf, rctx->authsize); in tegra_ccm_ctr_result()
1090 struct tegra_se *se = ctx->se; in tegra_ccm_compute_auth()
1096 return -EINVAL; in tegra_ccm_compute_auth()
1099 sg = rctx->encrypt ? rctx->src_sg : rctx->dst_sg; in tegra_ccm_compute_auth()
1101 scatterwalk_map_and_copy(rctx->inbuf.buf + offset, in tegra_ccm_compute_auth()
1102 sg, rctx->assoclen, in tegra_ccm_compute_auth()
1103 rctx->cryptlen, 0); in tegra_ccm_compute_auth()
1104 offset += rctx->cryptlen; in tegra_ccm_compute_auth()
1105 offset += tegra_ccm_add_padding(rctx->inbuf.buf + offset, rctx->cryptlen); in tegra_ccm_compute_auth()
1107 rctx->inbuf.size = offset; in tegra_ccm_compute_auth()
1118 struct tegra_se *se = ctx->se; in tegra_ccm_do_ctr()
1120 struct scatterlist *sg = rctx->src_sg; in tegra_ccm_do_ctr()
1123 rctx->config = tegra234_aes_cfg(SE_ALG_CTR, rctx->encrypt); in tegra_ccm_do_ctr()
1124 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_CTR, rctx->encrypt) | in tegra_ccm_do_ctr()
1125 SE_AES_KEY_INDEX(rctx->key_id); in tegra_ccm_do_ctr()
1128 if (rctx->encrypt) in tegra_ccm_do_ctr()
1129 memcpy(rctx->inbuf.buf, rctx->authdata, rctx->authsize); in tegra_ccm_do_ctr()
1131 scatterwalk_map_and_copy(rctx->inbuf.buf, sg, in tegra_ccm_do_ctr()
1132 rctx->assoclen + rctx->cryptlen, in tegra_ccm_do_ctr()
1133 rctx->authsize, 0); in tegra_ccm_do_ctr()
1135 offset += rctx->authsize; in tegra_ccm_do_ctr()
1136 offset += tegra_ccm_add_padding(rctx->inbuf.buf + offset, rctx->authsize); in tegra_ccm_do_ctr()
1139 if (rctx->cryptlen) { in tegra_ccm_do_ctr()
1140 scatterwalk_map_and_copy(rctx->inbuf.buf + offset, sg, in tegra_ccm_do_ctr()
1141 rctx->assoclen, rctx->cryptlen, 0); in tegra_ccm_do_ctr()
1142 offset += rctx->cryptlen; in tegra_ccm_do_ctr()
1143 offset += tegra_ccm_add_padding(rctx->inbuf.buf + offset, rctx->cryptlen); in tegra_ccm_do_ctr()
1146 rctx->inbuf.size = offset; in tegra_ccm_do_ctr()
1150 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_ccm_do_ctr()
1161 u8 *iv = (u8 *)rctx->iv; in tegra_ccm_crypt_init()
1164 rctx->src_sg = req->src; in tegra_ccm_crypt_init()
1165 rctx->dst_sg = req->dst; in tegra_ccm_crypt_init()
1166 rctx->assoclen = req->assoclen; in tegra_ccm_crypt_init()
1167 rctx->authsize = crypto_aead_authsize(tfm); in tegra_ccm_crypt_init()
1169 if (rctx->encrypt) in tegra_ccm_crypt_init()
1170 rctx->cryptlen = req->cryptlen; in tegra_ccm_crypt_init()
1172 rctx->cryptlen = req->cryptlen - rctx->authsize; in tegra_ccm_crypt_init()
1174 memcpy(iv, req->iv, 16); in tegra_ccm_crypt_init()
1180 /* Note: rfc 3610 and NIST 800-38C require counter (ctr_0) of in tegra_ccm_crypt_init()
1182 * req->iv has the formatted ctr_0 (i.e. Flags || N || 0). in tegra_ccm_crypt_init()
1184 memset(iv + 15 - iv[0], 0, iv[0] + 1); in tegra_ccm_crypt_init()
1188 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_ccm_crypt_init()
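Note: for this CCM flavour req->iv already carries the formatted counter block (Flags || N || counter) and iv[0] holds q - 1; clearing the last q bytes, as the memset above does, resets the counter field to zero before the CTR pass. A small sketch of the same operation (illustrative name):

#include <stdint.h>
#include <string.h>

static void ccm_reset_ctr0(uint8_t iv[16])
{
	unsigned int q = (unsigned int)(iv[0] & 0x07) + 1;  /* q - 1 lives in bits 0..2 */

	memset(iv + 16 - q, 0, q);   /* counter field starts at zero */
}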
1199 struct tegra_se *se = ctx->se; in tegra_ccm_do_one_req()
1206 rctx->key_id = ctx->key_id; in tegra_ccm_do_one_req()
1209 rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100; in tegra_ccm_do_one_req()
1210 rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_ccm_do_one_req()
1211 &rctx->inbuf.addr, GFP_KERNEL); in tegra_ccm_do_one_req()
1212 if (!rctx->inbuf.buf) in tegra_ccm_do_one_req()
1215 rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100; in tegra_ccm_do_one_req()
1216 rctx->outbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_ccm_do_one_req()
1217 &rctx->outbuf.addr, GFP_KERNEL); in tegra_ccm_do_one_req()
1218 if (!rctx->outbuf.buf) { in tegra_ccm_do_one_req()
1219 ret = -ENOMEM; in tegra_ccm_do_one_req()
1223 if (!ctx->key_id) { in tegra_ccm_do_one_req()
1224 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, in tegra_ccm_do_one_req()
1225 ctx->keylen, ctx->alg, &rctx->key_id); in tegra_ccm_do_one_req()
1230 if (rctx->encrypt) { in tegra_ccm_do_one_req()
1253 dma_free_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_ccm_do_one_req()
1254 rctx->outbuf.buf, rctx->outbuf.addr); in tegra_ccm_do_one_req()
1257 dma_free_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_ccm_do_one_req()
1258 rctx->inbuf.buf, rctx->inbuf.addr); in tegra_ccm_do_one_req()
1260 if (tegra_key_is_reserved(rctx->key_id)) in tegra_ccm_do_one_req()
1261 tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); in tegra_ccm_do_one_req()
1264 crypto_finalize_aead_request(ctx->se->engine, req, ret); in tegra_ccm_do_one_req()
1277 rctx->src_sg = req->src; in tegra_gcm_do_one_req()
1278 rctx->dst_sg = req->dst; in tegra_gcm_do_one_req()
1279 rctx->assoclen = req->assoclen; in tegra_gcm_do_one_req()
1280 rctx->authsize = crypto_aead_authsize(tfm); in tegra_gcm_do_one_req()
1282 if (rctx->encrypt) in tegra_gcm_do_one_req()
1283 rctx->cryptlen = req->cryptlen; in tegra_gcm_do_one_req()
1285 rctx->cryptlen = req->cryptlen - ctx->authsize; in tegra_gcm_do_one_req()
1287 memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE); in tegra_gcm_do_one_req()
1288 rctx->iv[3] = (1 << 24); in tegra_gcm_do_one_req()
1290 rctx->key_id = ctx->key_id; in tegra_gcm_do_one_req()
1293 rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen; in tegra_gcm_do_one_req()
1294 rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_gcm_do_one_req()
1295 &rctx->inbuf.addr, GFP_KERNEL); in tegra_gcm_do_one_req()
1296 if (!rctx->inbuf.buf) { in tegra_gcm_do_one_req()
1297 ret = -ENOMEM; in tegra_gcm_do_one_req()
1301 rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen; in tegra_gcm_do_one_req()
1302 rctx->outbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_gcm_do_one_req()
1303 &rctx->outbuf.addr, GFP_KERNEL); in tegra_gcm_do_one_req()
1304 if (!rctx->outbuf.buf) { in tegra_gcm_do_one_req()
1305 ret = -ENOMEM; in tegra_gcm_do_one_req()
1309 if (!ctx->key_id) { in tegra_gcm_do_one_req()
1310 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, in tegra_gcm_do_one_req()
1311 ctx->keylen, ctx->alg, &rctx->key_id); in tegra_gcm_do_one_req()
1317 if (rctx->assoclen) { in tegra_gcm_do_one_req()
1324 if (rctx->cryptlen) { in tegra_gcm_do_one_req()
1335 if (!rctx->encrypt) in tegra_gcm_do_one_req()
1336 ret = tegra_gcm_do_verify(ctx->se, rctx); in tegra_gcm_do_one_req()
1339 dma_free_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_gcm_do_one_req()
1340 rctx->outbuf.buf, rctx->outbuf.addr); in tegra_gcm_do_one_req()
1343 dma_free_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_gcm_do_one_req()
1344 rctx->inbuf.buf, rctx->inbuf.addr); in tegra_gcm_do_one_req()
1346 if (tegra_key_is_reserved(rctx->key_id)) in tegra_gcm_do_one_req()
1347 tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); in tegra_gcm_do_one_req()
1350 crypto_finalize_aead_request(ctx->se->engine, req, ret); in tegra_gcm_do_one_req()
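Note: with GCM's 96-bit IV the initial counter block is J0 = IV || 0^31 || 1; writing (1 << 24) into the fourth little-endian IV word, as the rctx->iv[3] assignment above does, places the trailing 0x01 byte at the end of the 16-byte block. A byte-oriented sketch of J0 (illustrative name):

#include <stdint.h>
#include <string.h>

#define GCM_AES_IV_SIZE 12

static void gcm_build_j0(uint8_t j0[16], const uint8_t iv[GCM_AES_IV_SIZE])
{
	memcpy(j0, iv, GCM_AES_IV_SIZE);  /* first 12 bytes: the IV itself    */
	j0[12] = 0;                       /* 32-bit counter, big-endian, == 1 */
	j0[13] = 0;
	j0[14] = 0;
	j0[15] = 1;
}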
1363 algname = crypto_tfm_alg_name(&tfm->base); in tegra_aead_cra_init()
1369 ctx->se = se_alg->se_dev; in tegra_aead_cra_init()
1370 ctx->key_id = 0; in tegra_aead_cra_init()
1371 ctx->keylen = 0; in tegra_aead_cra_init()
1375 dev_err(ctx->se->dev, "invalid algorithm\n"); in tegra_aead_cra_init()
1379 ctx->alg = ret; in tegra_aead_cra_init()
1398 return -EINVAL; in tegra_ccm_setauthsize()
1401 ctx->authsize = authsize; in tegra_ccm_setauthsize()
1415 ctx->authsize = authsize; in tegra_gcm_setauthsize()
1422 struct tegra_aead_ctx *ctx = crypto_tfm_ctx(&tfm->base); in tegra_aead_cra_exit()
1424 if (ctx->key_id) in tegra_aead_cra_exit()
1425 tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg); in tegra_aead_cra_exit()
1434 rctx->encrypt = encrypt; in tegra_aead_crypt()
1436 return crypto_transfer_aead_request_to_engine(ctx->se->engine, req); in tegra_aead_crypt()
1456 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_aead_setkey()
1457 return -EINVAL; in tegra_aead_setkey()
1460 ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); in tegra_aead_setkey()
1462 ctx->keylen = keylen; in tegra_aead_setkey()
1463 memcpy(ctx->key, key, keylen); in tegra_aead_setkey()
1473 struct tegra_se *se = ctx->se; in tegra_cmac_prep_cmd()
1474 u32 *cpuvaddr = se->cmdbuf->addr, op; in tegra_cmac_prep_cmd()
1476 data_count = (rctx->datbuf.size / AES_BLOCK_SIZE); in tegra_cmac_prep_cmd()
1480 if (!(rctx->task & SHA_UPDATE)) { in tegra_cmac_prep_cmd()
1482 res_bits = (rctx->datbuf.size % AES_BLOCK_SIZE) * 8; in tegra_cmac_prep_cmd()
1486 data_count--; in tegra_cmac_prep_cmd()
1488 if (rctx->task & SHA_FIRST) { in tegra_cmac_prep_cmd()
1489 rctx->task &= ~SHA_FIRST; in tegra_cmac_prep_cmd()
1492 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_cmac_prep_cmd()
1498 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_cmac_prep_cmd()
1502 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_cmac_prep_cmd()
1503 cpuvaddr[i++] = rctx->config; in tegra_cmac_prep_cmd()
1504 cpuvaddr[i++] = rctx->crypto_config; in tegra_cmac_prep_cmd()
1507 cpuvaddr[i++] = lower_32_bits(rctx->datbuf.addr); in tegra_cmac_prep_cmd()
1508 cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) | in tegra_cmac_prep_cmd()
1509 SE_ADDR_HI_SZ(rctx->datbuf.size); in tegra_cmac_prep_cmd()
1513 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_cmac_prep_cmd()
1518 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_cmac_prep_cmd()
1528 rctx->result[i] = readl(se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_copy_result()
1536 writel(rctx->result[i], in tegra_cmac_paste_result()
1537 se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_paste_result()
1545 struct tegra_se *se = ctx->se; in tegra_cmac_do_init()
1548 rctx->total_len = 0; in tegra_cmac_do_init()
1549 rctx->datbuf.size = 0; in tegra_cmac_do_init()
1550 rctx->residue.size = 0; in tegra_cmac_do_init()
1551 rctx->key_id = ctx->key_id; in tegra_cmac_do_init()
1552 rctx->task |= SHA_FIRST; in tegra_cmac_do_init()
1553 rctx->blk_size = crypto_ahash_blocksize(tfm); in tegra_cmac_do_init()
1555 rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size * 2, in tegra_cmac_do_init()
1556 &rctx->residue.addr, GFP_KERNEL); in tegra_cmac_do_init()
1557 if (!rctx->residue.buf) in tegra_cmac_do_init()
1558 return -ENOMEM; in tegra_cmac_do_init()
1560 rctx->residue.size = 0; in tegra_cmac_do_init()
1564 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_do_init()
1574 struct tegra_se *se = ctx->se; in tegra_cmac_do_update()
1578 if (!req->nbytes) in tegra_cmac_do_update()
1581 nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size; in tegra_cmac_do_update()
1582 nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size; in tegra_cmac_do_update()
1588 nresidue += rctx->blk_size; in tegra_cmac_do_update()
1589 nblks--; in tegra_cmac_do_update()
1592 rctx->src_sg = req->src; in tegra_cmac_do_update()
1593 rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue; in tegra_cmac_do_update()
1594 rctx->total_len += rctx->datbuf.size; in tegra_cmac_do_update()
1595 rctx->config = tegra234_aes_cfg(SE_ALG_CMAC, 0); in tegra_cmac_do_update()
1596 rctx->crypto_config = SE_AES_KEY_INDEX(rctx->key_id); in tegra_cmac_do_update()
1603 scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size, in tegra_cmac_do_update()
1604 rctx->src_sg, 0, req->nbytes, 0); in tegra_cmac_do_update()
1606 rctx->residue.size += req->nbytes; in tegra_cmac_do_update()
1610 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size, in tegra_cmac_do_update()
1611 &rctx->datbuf.addr, GFP_KERNEL); in tegra_cmac_do_update()
1612 if (!rctx->datbuf.buf) in tegra_cmac_do_update()
1613 return -ENOMEM; in tegra_cmac_do_update()
1616 if (rctx->residue.size) in tegra_cmac_do_update()
1617 memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size); in tegra_cmac_do_update()
1619 scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size, in tegra_cmac_do_update()
1620 rctx->src_sg, 0, req->nbytes - nresidue, 0); in tegra_cmac_do_update()
1622 scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg, in tegra_cmac_do_update()
1623 req->nbytes - nresidue, nresidue, 0); in tegra_cmac_do_update()
1626 rctx->residue.size = nresidue; in tegra_cmac_do_update()
1632 if (!(rctx->task & SHA_FIRST)) in tegra_cmac_do_update()
1633 tegra_cmac_paste_result(ctx->se, rctx); in tegra_cmac_do_update()
1636 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_cmac_do_update()
1638 tegra_cmac_copy_result(ctx->se, rctx); in tegra_cmac_do_update()
1640 dma_free_coherent(ctx->se->dev, rctx->datbuf.size, in tegra_cmac_do_update()
1641 rctx->datbuf.buf, rctx->datbuf.addr); in tegra_cmac_do_update()
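Note: the CMAC update path above only sends whole blocks to the engine; the trailing bytes (and one full block, when the data ends exactly on a block boundary) stay in the residue buffer so the final step can apply CMAC's last-block subkey handling. A sketch of that split (names are illustrative):

#include <stddef.h>

#define AES_BLOCK_SIZE 16

struct cmac_split {
	size_t process_len;   /* bytes to push to the engine now       */
	size_t residue_len;   /* bytes to keep back for the final step */
};

static struct cmac_split cmac_split_update(size_t residue, size_t nbytes)
{
	size_t total = residue + nbytes;
	size_t keep = total % AES_BLOCK_SIZE;

	/* never let the last block go out during an update: the final
	 * transform must see it to apply the CMAC subkey
	 */
	if (keep == 0 && total)
		keep = AES_BLOCK_SIZE;

	return (struct cmac_split){ .process_len = total - keep,
				    .residue_len = keep };
}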
1651 struct tegra_se *se = ctx->se; in tegra_cmac_do_final()
1652 u32 *result = (u32 *)req->result; in tegra_cmac_do_final()
1655 if (!req->nbytes && !rctx->total_len && ctx->fallback_tfm) { in tegra_cmac_do_final()
1656 return crypto_shash_tfm_digest(ctx->fallback_tfm, in tegra_cmac_do_final()
1657 NULL, 0, req->result); in tegra_cmac_do_final()
1660 if (rctx->residue.size) { in tegra_cmac_do_final()
1661 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size, in tegra_cmac_do_final()
1662 &rctx->datbuf.addr, GFP_KERNEL); in tegra_cmac_do_final()
1663 if (!rctx->datbuf.buf) { in tegra_cmac_do_final()
1664 ret = -ENOMEM; in tegra_cmac_do_final()
1668 memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size); in tegra_cmac_do_final()
1671 rctx->datbuf.size = rctx->residue.size; in tegra_cmac_do_final()
1672 rctx->total_len += rctx->residue.size; in tegra_cmac_do_final()
1673 rctx->config = tegra234_aes_cfg(SE_ALG_CMAC, 0); in tegra_cmac_do_final()
1679 if (!(rctx->task & SHA_FIRST)) in tegra_cmac_do_final()
1680 tegra_cmac_paste_result(ctx->se, rctx); in tegra_cmac_do_final()
1684 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_cmac_do_final()
1690 result[i] = readl(se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_do_final()
1693 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_do_final()
1696 if (rctx->residue.size) in tegra_cmac_do_final()
1697 dma_free_coherent(se->dev, rctx->datbuf.size, in tegra_cmac_do_final()
1698 rctx->datbuf.buf, rctx->datbuf.addr); in tegra_cmac_do_final()
1700 dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm) * 2, in tegra_cmac_do_final()
1701 rctx->residue.buf, rctx->residue.addr); in tegra_cmac_do_final()
1711 struct tegra_se *se = ctx->se; in tegra_cmac_do_one_req()
1714 if (rctx->task & SHA_INIT) { in tegra_cmac_do_one_req()
1719 rctx->task &= ~SHA_INIT; in tegra_cmac_do_one_req()
1722 if (!ctx->key_id) { in tegra_cmac_do_one_req()
1723 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, in tegra_cmac_do_one_req()
1724 ctx->keylen, ctx->alg, &rctx->key_id); in tegra_cmac_do_one_req()
1729 if (rctx->task & SHA_UPDATE) { in tegra_cmac_do_one_req()
1734 rctx->task &= ~SHA_UPDATE; in tegra_cmac_do_one_req()
1737 if (rctx->task & SHA_FINAL) { in tegra_cmac_do_one_req()
1742 rctx->task &= ~SHA_FINAL; in tegra_cmac_do_one_req()
1745 if (tegra_key_is_reserved(rctx->key_id)) in tegra_cmac_do_one_req()
1746 tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); in tegra_cmac_do_one_req()
1748 crypto_finalize_hash_request(se->engine, req, ret); in tegra_cmac_do_one_req()
1758 ctx->fallback_tfm = crypto_alloc_shash(algname, 0, CRYPTO_ALG_NEED_FALLBACK); in tegra_cmac_init_fallback()
1760 if (IS_ERR(ctx->fallback_tfm)) { in tegra_cmac_init_fallback()
1761 dev_warn(ctx->se->dev, "failed to allocate fallback for %s\n", algname); in tegra_cmac_init_fallback()
1762 ctx->fallback_tfm = NULL; in tegra_cmac_init_fallback()
1766 statesize = crypto_shash_statesize(ctx->fallback_tfm); in tegra_cmac_init_fallback()
1776 struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg); in tegra_cmac_cra_init()
1786 ctx->se = se_alg->se_dev; in tegra_cmac_cra_init()
1787 ctx->key_id = 0; in tegra_cmac_cra_init()
1788 ctx->keylen = 0; in tegra_cmac_cra_init()
1792 dev_err(ctx->se->dev, "invalid algorithm\n"); in tegra_cmac_cra_init()
1796 ctx->alg = ret; in tegra_cmac_cra_init()
1807 if (ctx->fallback_tfm) in tegra_cmac_cra_exit()
1808 crypto_free_shash(ctx->fallback_tfm); in tegra_cmac_cra_exit()
1810 tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg); in tegra_cmac_cra_exit()
1820 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_cmac_setkey()
1821 return -EINVAL; in tegra_cmac_setkey()
1824 if (ctx->fallback_tfm) in tegra_cmac_setkey()
1825 crypto_shash_setkey(ctx->fallback_tfm, key, keylen); in tegra_cmac_setkey()
1827 ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); in tegra_cmac_setkey()
1829 ctx->keylen = keylen; in tegra_cmac_setkey()
1830 memcpy(ctx->key, key, keylen); in tegra_cmac_setkey()
1842 rctx->task = SHA_INIT; in tegra_cmac_init()
1844 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_init()
1853 rctx->task |= SHA_UPDATE; in tegra_cmac_update()
1855 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_update()
1864 rctx->task |= SHA_FINAL; in tegra_cmac_final()
1866 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_final()
1875 rctx->task |= SHA_UPDATE | SHA_FINAL; in tegra_cmac_finup()
1877 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_finup()
1886 rctx->task |= SHA_INIT | SHA_UPDATE | SHA_FINAL; in tegra_cmac_digest()
1888 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_digest()
1922 .cra_name = "gcm(aes)",
1923 .cra_driver_name = "gcm-aes-tegra",
1944 .cra_name = "ccm(aes)",
1945 .cra_driver_name = "ccm-aes-tegra",
1971 .cra_name = "cmac(aes)",
1972 .cra_driver_name = "tegra-se-cmac",
1993 se->manifest = tegra_aes_kac_manifest; in tegra_init_aes()
2001 dev_err(se->dev, "failed to register %s\n", in tegra_init_aes()
2002 sk_alg->base.base.cra_name); in tegra_init_aes()
2013 dev_err(se->dev, "failed to register %s\n", in tegra_init_aes()
2014 aead_alg->base.base.cra_name); in tegra_init_aes()
2025 dev_err(se->dev, "failed to register %s\n", in tegra_init_aes()
2026 ahash_alg->base.halg.base.cra_name); in tegra_init_aes()
2034 while (i--) in tegra_init_aes()
2039 while (i--) in tegra_init_aes()
2044 while (i--) in tegra_init_aes()