Lines Matching +full:sha3 +full:- +full:512
1 // SPDX-License-Identifier: GPL-2.0-only
2 // SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
8 #include <linux/dma-mapping.h>
16 #include <crypto/sha3.h>
22 #include "tegra-se.h"
107 return -EINVAL; in tegra_sha_get_config()
119 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_init()
120 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_init()
123 return crypto_ahash_init(&rctx->fallback_req); in tegra_sha_fallback_init()
132 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_update()
133 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_update()
135 rctx->fallback_req.nbytes = req->nbytes; in tegra_sha_fallback_update()
136 rctx->fallback_req.src = req->src; in tegra_sha_fallback_update()
138 return crypto_ahash_update(&rctx->fallback_req); in tegra_sha_fallback_update()
147 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_final()
148 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_final()
150 rctx->fallback_req.result = req->result; in tegra_sha_fallback_final()
152 return crypto_ahash_final(&rctx->fallback_req); in tegra_sha_fallback_final()
161 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_finup()
162 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_finup()
165 rctx->fallback_req.nbytes = req->nbytes; in tegra_sha_fallback_finup()
166 rctx->fallback_req.src = req->src; in tegra_sha_fallback_finup()
167 rctx->fallback_req.result = req->result; in tegra_sha_fallback_finup()
169 return crypto_ahash_finup(&rctx->fallback_req); in tegra_sha_fallback_finup()
178 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_digest()
179 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_digest()
182 rctx->fallback_req.nbytes = req->nbytes; in tegra_sha_fallback_digest()
183 rctx->fallback_req.src = req->src; in tegra_sha_fallback_digest()
184 rctx->fallback_req.result = req->result; in tegra_sha_fallback_digest()
186 return crypto_ahash_digest(&rctx->fallback_req); in tegra_sha_fallback_digest()
195 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_import()
196 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_import()
199 return crypto_ahash_import(&rctx->fallback_req, in); in tegra_sha_fallback_import()
208 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_export()
209 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_export()
212 return crypto_ahash_export(&rctx->fallback_req, out); in tegra_sha_fallback_export()
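All six fallback helpers above follow one pattern: retarget the request at the software tfm with ahash_request_set_tfm(), then forward only a masked subset of the caller's base flags (the mask itself sits on continuation lines not matched by this search). A minimal standalone sketch of that flag filtering, with hypothetical REQ_* values standing in for the kernel's CRYPTO_TFM_REQ_* constants:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for CRYPTO_TFM_REQ_MAY_BACKLOG/MAY_SLEEP. */
#define REQ_MAY_BACKLOG (1u << 0)
#define REQ_MAY_SLEEP   (1u << 1)
#define REQ_PRIVATE     (1u << 7)	/* anything else the caller set */

/* Forward only the scheduling hints to the fallback request. */
static uint32_t fallback_flags(uint32_t caller_flags)
{
	return caller_flags & (REQ_MAY_BACKLOG | REQ_MAY_SLEEP);
}

int main(void)
{
	uint32_t flags = REQ_MAY_SLEEP | REQ_PRIVATE;

	/* Only REQ_MAY_SLEEP survives the mask. */
	printf("%#x -> %#x\n", (unsigned)flags, (unsigned)fallback_flags(flags));
	return 0;
}

Masking keeps the scheduler hints the fallback may honor while dropping caller-private bits it must not see.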
218 __be32 *res_be = (__be32 *)rctx->intr_res.buf; in tegra_se_insert_hash_result()
219 u32 *res = (u32 *)rctx->intr_res.buf; in tegra_se_insert_hash_result()
 230	 * The initial, intermediate and final hash values of SHA-384 and	 in tegra_se_insert_hash_result()
 233	 * SHA-512 occupy two SHA_HASH_RESULT registers per 64-bit state	 in tegra_se_insert_hash_result()
 235	 * word, with the two 32-bit halves stored in swapped order (the	 in tegra_se_insert_hash_result()
 237	 * register layout table is elided in this listing), hence the	 in tegra_se_insert_hash_result()
 239	 * pairwise index swap below:	 in tegra_se_insert_hash_result()
250 if (ctx->alg == SE_ALG_SHA384 || ctx->alg == SE_ALG_SHA512) in tegra_se_insert_hash_result()
251 idx = (j % 2) ? j - 1 : j + 1; in tegra_se_insert_hash_result()
 253	 /* For SHA-1, SHA-224, SHA-256, SHA-384 and SHA-512, the initial,	 in tegra_se_insert_hash_result()
 256	  * intermediate and final hash values stored in the SHA_HASH_RESULT registers are not in little-endian byte order. */	 in tegra_se_insert_hash_result()
258 if (ctx->alg <= SE_ALG_SHA512) in tegra_se_insert_hash_result()
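The loop this fragment belongs to re-injects the saved intermediate digest into the engine. The idx remapping above swaps register pairs for SHA-384/512, and the visible res_be/res declarations imply a big-endian fixup for the SHA-1/SHA-2 family. A standalone sketch of that reordering, using a portable stand-in for be32_to_cpu() and dummy register contents:

#include <stdint.h>
#include <stdio.h>

#define HASH_RESULT_REG_COUNT 16

/* Portable stand-in for the kernel's be32_to_cpu(): interpret the four
 * bytes of v, as stored in memory, as a big-endian 32-bit value. */
static uint32_t be32_to_host(uint32_t v)
{
	const uint8_t *b = (const uint8_t *)&v;

	return ((uint32_t)b[0] << 24) | ((uint32_t)b[1] << 16) |
	       ((uint32_t)b[2] << 8) | (uint32_t)b[3];
}

int main(void)
{
	uint32_t saved[HASH_RESULT_REG_COUNT], out[HASH_RESULT_REG_COUNT];
	int j, is_sha512 = 1;

	for (j = 0; j < HASH_RESULT_REG_COUNT; j++)
		saved[j] = (uint32_t)j * 0x01010101u; /* dummy register dump */

	for (j = 0; j < HASH_RESULT_REG_COUNT; j++) {
		/* Registers 2n/2n+1 hold one 64-bit word; swap the halves. */
		int idx = is_sha512 ? ((j % 2) ? j - 1 : j + 1) : j;

		out[j] = be32_to_host(saved[idx]);
	}

	for (j = 0; j < 4; j++)
		printf("out[%d] = %#010x\n", j, (unsigned)out[j]);
	return 0;
}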
270 struct tegra_se *se = ctx->se; in tegra_sha_prep_cmd()
274 msg_len = rctx->total_len * 8; in tegra_sha_prep_cmd()
275 msg_left = rctx->datbuf.size * 8; in tegra_sha_prep_cmd()
 278	 * If IN_ADDR_HI_0.SZ sent to the HASH engine exceeds SHA_MSG_LEFT_[0-3], the hardware treats the buffer as the last one and finalizes the digest, so intermediate updates below keep SHA_MSG_LEFT strictly larger:	 in tegra_sha_prep_cmd()
283 if (rctx->task & SHA_UPDATE) { in tegra_sha_prep_cmd()
300 cpuvaddr[i++] = rctx->config; in tegra_sha_prep_cmd()
302 if (rctx->task & SHA_FIRST) { in tegra_sha_prep_cmd()
304 rctx->task &= ~SHA_FIRST; in tegra_sha_prep_cmd()
315 cpuvaddr[i++] = rctx->datbuf.addr; in tegra_sha_prep_cmd()
316 cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) | in tegra_sha_prep_cmd()
317 SE_ADDR_HI_SZ(rctx->datbuf.size)); in tegra_sha_prep_cmd()
319 if (rctx->task & SHA_UPDATE) { in tegra_sha_prep_cmd()
320 cpuvaddr[i++] = rctx->intr_res.addr; in tegra_sha_prep_cmd()
321 cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->intr_res.addr)) | in tegra_sha_prep_cmd()
322 SE_ADDR_HI_SZ(rctx->intr_res.size)); in tegra_sha_prep_cmd()
324 cpuvaddr[i++] = rctx->digest.addr; in tegra_sha_prep_cmd()
325 cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) | in tegra_sha_prep_cmd()
326 SE_ADDR_HI_SZ(rctx->digest.size)); in tegra_sha_prep_cmd()
329 if (rctx->key_id) { in tegra_sha_prep_cmd()
332 cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id); in tegra_sha_prep_cmd()
341 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_sha_prep_cmd()
343 dev_dbg(se->dev, "msg len %llu msg left %llu sz %zd cfg %#x", in tegra_sha_prep_cmd()
344 msg_len, msg_left, rctx->datbuf.size, rctx->config); in tegra_sha_prep_cmd()
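tegra_sha_prep_cmd() programs lengths in bits (the * 8 above) and splits each 64-bit DMA address into a low word plus a high word that also carries the buffer size via SE_ADDR_HI_MSB()/SE_ADDR_HI_SZ(). Those macros live in tegra-se.h and are not shown here, so the field positions in this sketch are invented purely for illustration:

#include <stdint.h>
#include <stdio.h>

/*
 * Invented field layout: the real SE_ADDR_HI_MSB()/SE_ADDR_HI_SZ()
 * macros are defined in tegra-se.h and may place the fields elsewhere.
 */
#define ADDR_HI_MSB(msb)	(((uint32_t)(msb) & 0xff) << 24)
#define ADDR_HI_SZ(sz)		((uint32_t)(sz) & 0x00ffffff)

int main(void)
{
	uint64_t dma_addr = 0x17654321full;	/* example DMA address */
	uint32_t size = 4096;			/* example buffer size */
	uint32_t lo = (uint32_t)dma_addr;
	uint32_t hi = ADDR_HI_MSB(dma_addr >> 32) | ADDR_HI_SZ(size);

	/* Two command words per buffer: low address, then MSB + size. */
	printf("lo=%#010x hi=%#010x\n", (unsigned)lo, (unsigned)hi);
	return 0;
}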
354 struct tegra_se *se = ctx->se; in tegra_sha_do_init()
356 if (ctx->fallback) in tegra_sha_do_init()
359 rctx->total_len = 0; in tegra_sha_do_init()
360 rctx->datbuf.size = 0; in tegra_sha_do_init()
361 rctx->residue.size = 0; in tegra_sha_do_init()
362 rctx->key_id = ctx->key_id; in tegra_sha_do_init()
363 rctx->task |= SHA_FIRST; in tegra_sha_do_init()
364 rctx->alg = ctx->alg; in tegra_sha_do_init()
365 rctx->blk_size = crypto_ahash_blocksize(tfm); in tegra_sha_do_init()
366 rctx->digest.size = crypto_ahash_digestsize(tfm); in tegra_sha_do_init()
368 rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size, in tegra_sha_do_init()
369 &rctx->digest.addr, GFP_KERNEL); in tegra_sha_do_init()
370 if (!rctx->digest.buf) in tegra_sha_do_init()
373 rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size, in tegra_sha_do_init()
374 &rctx->residue.addr, GFP_KERNEL); in tegra_sha_do_init()
375 if (!rctx->residue.buf) in tegra_sha_do_init()
378 rctx->intr_res.size = HASH_RESULT_REG_COUNT * 4; in tegra_sha_do_init()
379 rctx->intr_res.buf = dma_alloc_coherent(se->dev, rctx->intr_res.size, in tegra_sha_do_init()
380 &rctx->intr_res.addr, GFP_KERNEL); in tegra_sha_do_init()
381 if (!rctx->intr_res.buf) in tegra_sha_do_init()
387 dma_free_coherent(se->dev, rctx->residue.size, rctx->residue.buf, in tegra_sha_do_init()
388 rctx->residue.addr); in tegra_sha_do_init()
390 dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf, in tegra_sha_do_init()
391 rctx->digest.addr); in tegra_sha_do_init()
393 return -ENOMEM; in tegra_sha_do_init()
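tegra_sha_do_init() allocates three coherent buffers (digest, residue, intermediate result) and unwinds the earlier allocations when a later one fails. The same goto-unwind shape, sketched standalone with malloc() standing in for dma_alloc_coherent() and illustrative sizes:

#include <stdio.h>
#include <stdlib.h>

/* Same unwind shape as tegra_sha_do_init(); malloc() stands in for
 * dma_alloc_coherent() and the sizes are illustrative. */
static int init_buffers(void **digest, void **residue, void **intr_res)
{
	*digest = malloc(64);		/* digest-sized buffer */
	if (!*digest)
		goto err_out;

	*residue = malloc(128);		/* one block of residue */
	if (!*residue)
		goto err_free_digest;

	*intr_res = malloc(16 * 4);	/* HASH_RESULT_REG_COUNT words */
	if (!*intr_res)
		goto err_free_residue;

	return 0;

err_free_residue:
	free(*residue);
err_free_digest:
	free(*digest);
err_out:
	return -1;			/* -ENOMEM in the driver */
}

int main(void)
{
	void *d, *r, *ir;

	if (init_buffers(&d, &r, &ir))
		return 1;
	free(ir);
	free(r);
	free(d);
	return 0;
}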
400 struct tegra_se *se = ctx->se; in tegra_sha_do_update()
402 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_sha_do_update()
404 nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size; in tegra_sha_do_update()
405 nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size; in tegra_sha_do_update()
412 nresidue = rctx->blk_size; in tegra_sha_do_update()
413 nblks--; in tegra_sha_do_update()
416 rctx->src_sg = req->src; in tegra_sha_do_update()
417 rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue; in tegra_sha_do_update()
424 scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size, in tegra_sha_do_update()
425 rctx->src_sg, 0, req->nbytes, 0); in tegra_sha_do_update()
426 rctx->residue.size += req->nbytes; in tegra_sha_do_update()
431 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size, in tegra_sha_do_update()
432 &rctx->datbuf.addr, GFP_KERNEL); in tegra_sha_do_update()
433 if (!rctx->datbuf.buf) in tegra_sha_do_update()
434 return -ENOMEM; in tegra_sha_do_update()
437 if (rctx->residue.size) in tegra_sha_do_update()
438 memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size); in tegra_sha_do_update()
440 scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size, in tegra_sha_do_update()
441 rctx->src_sg, 0, req->nbytes - nresidue, 0); in tegra_sha_do_update()
443 scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg, in tegra_sha_do_update()
444 req->nbytes - nresidue, nresidue, 0); in tegra_sha_do_update()
447 rctx->residue.size = nresidue; in tegra_sha_do_update()
448 rctx->total_len += rctx->datbuf.size; in tegra_sha_do_update()
450 rctx->config = tegra_sha_get_config(rctx->alg) | in tegra_sha_do_update()
454 ret = tegra_se_host1x_submit(se, se->cmdbuf, size); in tegra_sha_do_update()
456 dma_free_coherent(se->dev, rctx->datbuf.size, in tegra_sha_do_update()
457 rctx->datbuf.buf, rctx->datbuf.addr); in tegra_sha_do_update()
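The update path splits (residue + nbytes) into a block-aligned chunk for the engine and a remainder carried to the next call; when the total is an exact multiple of the block size, one full block is held back (the nresidue/nblks adjustment above) so that final() always has data to close the hash with. A standalone sketch of that arithmetic, assuming a non-empty total:

#include <stdio.h>
#include <stddef.h>

/* Split (residue + nbytes) into a block-aligned chunk for the engine
 * and a remainder kept for the next update. Mirrors the nresidue/nblks
 * adjustment in tegra_sha_do_update(); assumes a non-empty total. */
static void split(size_t nbytes, size_t residue, size_t blk,
		  size_t *hw_bytes, size_t *new_residue)
{
	size_t total = nbytes + residue;
	size_t nresidue = total % blk;

	/* Exact multiple: keep one full block back for final(). */
	if (!nresidue)
		nresidue = blk;

	*hw_bytes = total - nresidue;
	*new_residue = nresidue;
}

int main(void)
{
	size_t hw, res;

	split(128, 0, 64, &hw, &res);	/* exact multiple */
	printf("hw=%zu residue=%zu\n", hw, res);	/* hw=64 res=64 */

	split(70, 0, 64, &hw, &res);	/* one block plus change */
	printf("hw=%zu residue=%zu\n", hw, res);	/* hw=64 res=6 */

	split(10, 20, 64, &hw, &res);	/* under one block: all residue */
	printf("hw=%zu residue=%zu\n", hw, res);	/* hw=0 res=30 */
	return 0;
}

A zero hw_bytes result corresponds to the copy-to-residue early return visible above: everything waits in the residue buffer until a full block accumulates or final() runs.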
467 struct tegra_se *se = ctx->se; in tegra_sha_do_final()
468 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_sha_do_final()
471 if (rctx->residue.size) { in tegra_sha_do_final()
472 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size, in tegra_sha_do_final()
473 &rctx->datbuf.addr, GFP_KERNEL); in tegra_sha_do_final()
474 if (!rctx->datbuf.buf) { in tegra_sha_do_final()
475 ret = -ENOMEM; in tegra_sha_do_final()
479 memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size); in tegra_sha_do_final()
482 rctx->datbuf.size = rctx->residue.size; in tegra_sha_do_final()
483 rctx->total_len += rctx->residue.size; in tegra_sha_do_final()
485 rctx->config = tegra_sha_get_config(rctx->alg) | in tegra_sha_do_final()
489 ret = tegra_se_host1x_submit(se, se->cmdbuf, size); in tegra_sha_do_final()
494 memcpy(req->result, rctx->digest.buf, rctx->digest.size); in tegra_sha_do_final()
497 if (rctx->residue.size) in tegra_sha_do_final()
498 dma_free_coherent(se->dev, rctx->datbuf.size, in tegra_sha_do_final()
499 rctx->datbuf.buf, rctx->datbuf.addr); in tegra_sha_do_final()
501 dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm), in tegra_sha_do_final()
502 rctx->residue.buf, rctx->residue.addr); in tegra_sha_do_final()
503 dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf, in tegra_sha_do_final()
504 rctx->digest.addr); in tegra_sha_do_final()
506 dma_free_coherent(se->dev, rctx->intr_res.size, rctx->intr_res.buf, in tegra_sha_do_final()
507 rctx->intr_res.addr); in tegra_sha_do_final()
518 struct tegra_se *se = ctx->se; in tegra_sha_do_one_req()
521 if (rctx->task & SHA_INIT) { in tegra_sha_do_one_req()
526 rctx->task &= ~SHA_INIT; in tegra_sha_do_one_req()
529 if (rctx->task & SHA_UPDATE) { in tegra_sha_do_one_req()
534 rctx->task &= ~SHA_UPDATE; in tegra_sha_do_one_req()
537 if (rctx->task & SHA_FINAL) { in tegra_sha_do_one_req()
542 rctx->task &= ~SHA_FINAL; in tegra_sha_do_one_req()
546 crypto_finalize_hash_request(se->engine, req, ret); in tegra_sha_do_one_req()
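tegra_sha_do_one_req() is a small state machine over the rctx->task bitmask: INIT, UPDATE and FINAL are dispatched in order and cleared as they complete, and digest() (further below) simply queues all three at once. A standalone sketch with illustrative flag values (the driver defines its own in tegra-se.h):

#include <stdio.h>

/* Illustrative flag values; the driver defines its own in tegra-se.h. */
#define SHA_INIT	(1u << 0)
#define SHA_UPDATE	(1u << 1)
#define SHA_FINAL	(1u << 2)

/* Dispatch order of tegra_sha_do_one_req(): each phase runs at most
 * once per submission and clears its bit when done. */
static void do_one_req(unsigned int *task)
{
	if (*task & SHA_INIT) {
		printf("init\n");
		*task &= ~SHA_INIT;
	}
	if (*task & SHA_UPDATE) {
		printf("update\n");
		*task &= ~SHA_UPDATE;
	}
	if (*task & SHA_FINAL) {
		printf("final\n");
		*task &= ~SHA_FINAL;
	}
}

int main(void)
{
	/* digest() queues all three phases in a single request. */
	unsigned int task = SHA_INIT | SHA_UPDATE | SHA_FINAL;

	do_one_req(&task);
	return 0;
}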
556 ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC | in tegra_sha_init_fallback()
559 if (IS_ERR(ctx->fallback_tfm)) { in tegra_sha_init_fallback()
560 dev_warn(ctx->se->dev, in tegra_sha_init_fallback()
562 ctx->fallback_tfm = NULL; in tegra_sha_init_fallback()
566 statesize = crypto_ahash_statesize(ctx->fallback_tfm); in tegra_sha_init_fallback()
574 crypto_ahash_reqsize(ctx->fallback_tfm)); in tegra_sha_init_fallback()
581 struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg); in tegra_sha_cra_init()
591 ctx->se = se_alg->se_dev; in tegra_sha_cra_init()
592 ctx->fallback = false; in tegra_sha_cra_init()
593 ctx->key_id = 0; in tegra_sha_cra_init()
597 dev_err(ctx->se->dev, "invalid algorithm\n"); in tegra_sha_cra_init()
601 if (se_alg->alg_base) in tegra_sha_cra_init()
604 ctx->alg = ret; in tegra_sha_cra_init()
613 if (ctx->fallback_tfm) in tegra_sha_cra_exit()
614 crypto_free_ahash(ctx->fallback_tfm); in tegra_sha_cra_exit()
616 tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg); in tegra_sha_cra_exit()
622 if (!ctx->fallback_tfm) { in tegra_hmac_fallback_setkey()
623 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_hmac_fallback_setkey()
624 return -EINVAL; in tegra_hmac_fallback_setkey()
627 ctx->fallback = true; in tegra_hmac_fallback_setkey()
628 return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen); in tegra_hmac_fallback_setkey()
640 ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); in tegra_hmac_setkey()
644 ctx->fallback = false; in tegra_hmac_setkey()
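tegra_hmac_setkey() tries to load the key into a hardware keyslot via tegra_key_submit() and clears ctx->fallback on success; keys the hardware cannot take are routed to the software tfm by the fallback_setkey path above. A sketch of that decision, where the accepted key sizes are an assumption (the real check is on lines not matched by this search):

#include <stdio.h>

/* Decision shape of tegra_hmac_setkey(). The accepted hardware key
 * sizes here are an assumption, not taken from the listing. */
static void hw_setkey(unsigned int keylen, int *use_fallback)
{
	if (keylen != 16 && keylen != 24 && keylen != 32) {
		*use_fallback = 1;	/* software tfm handles requests */
		return;
	}

	/* tegra_key_submit() would load a keyslot here. */
	*use_fallback = 0;
}

int main(void)
{
	int fb;

	hw_setkey(20, &fb);		/* e.g. a 20-byte HMAC key */
	printf("fallback=%d\n", fb);
	return 0;
}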
655 rctx->task = SHA_INIT; in tegra_sha_init()
657 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_sha_init()
666 if (ctx->fallback) in tegra_sha_update()
669 rctx->task |= SHA_UPDATE; in tegra_sha_update()
671 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_sha_update()
680 if (ctx->fallback) in tegra_sha_final()
683 rctx->task |= SHA_FINAL; in tegra_sha_final()
685 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_sha_final()
694 if (ctx->fallback) in tegra_sha_finup()
697 rctx->task |= SHA_UPDATE | SHA_FINAL; in tegra_sha_finup()
699 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_sha_finup()
708 if (ctx->fallback) in tegra_sha_digest()
711 rctx->task |= SHA_INIT | SHA_UPDATE | SHA_FINAL; in tegra_sha_digest()
713 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_sha_digest()
722 if (ctx->fallback) in tegra_sha_export()
736 if (ctx->fallback) in tegra_sha_import()
759 .cra_driver_name = "tegra-se-sha1",
784 .cra_driver_name = "tegra-se-sha224",
809 .cra_driver_name = "tegra-se-sha256",
834 .cra_driver_name = "tegra-se-sha384",
859 .cra_driver_name = "tegra-se-sha512",
883 .cra_name = "sha3-224",
884 .cra_driver_name = "tegra-se-sha3-224",
908 .cra_name = "sha3-256",
909 .cra_driver_name = "tegra-se-sha3-256",
933 .cra_name = "sha3-384",
934 .cra_driver_name = "tegra-se-sha3-384",
958 .cra_name = "sha3-512",
959 .cra_driver_name = "tegra-se-sha3-512",
986 .cra_driver_name = "tegra-se-hmac-sha224",
1013 .cra_driver_name = "tegra-se-hmac-sha256",
1040 .cra_driver_name = "tegra-se-hmac-sha384",
1067 .cra_driver_name = "tegra-se-hmac-sha512",
1095 return -EINVAL; in tegra_hash_kac_manifest()
1119 se->manifest = tegra_hash_kac_manifest; in tegra_init_hash()
1127 dev_err(se->dev, "failed to register %s\n", in tegra_init_hash()
1128 alg->base.halg.base.cra_name); in tegra_init_hash()
1136 while (i--) in tegra_init_hash()
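The while (i--) above is the registration unwind in tegra_init_hash(): if registering algorithm i fails, every previously registered algorithm is unregistered in reverse order. The same shape, standalone, with stub register/unregister functions:

#include <stdio.h>

#define NUM_ALGS 3

static const char *algs[NUM_ALGS] = { "sha256", "sha3-256", "hmac-sha256" };

static int register_alg(int i)
{
	return i == NUM_ALGS - 1 ? -1 : 0;	/* last one fails */
}

static void unregister_alg(int i)
{
	printf("unregister %s\n", algs[i]);
}

int main(void)
{
	int i;

	for (i = 0; i < NUM_ALGS; i++) {
		if (register_alg(i))
			goto err;
	}
	return 0;

err:
	/* Unwind as in tegra_init_hash(): drop what was registered. */
	while (i--)
		unregister_alg(i);
	return 1;
}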