Lines Matching full:se
23 #include "tegra-se.h"
26 struct tegra_se *se; member
46 struct tegra_se *se; member
71 struct tegra_se *se; member
212 struct tegra_se *se = ctx->se; in tegra_aes_prep_cmd() local
213 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_aes_prep_cmd()
228 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_aes_prep_cmd()
233 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_aes_prep_cmd()
237 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_aes_prep_cmd()
250 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_aes_prep_cmd()
256 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_aes_prep_cmd()
258 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config); in tegra_aes_prep_cmd()
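Note: the tegra_aes_prep_cmd() hits above all follow one pattern: host1x opcodes and register values are appended word by word to the command buffer at se->cmdbuf->addr, and the filled word count is what later gets submitted as the command length. The following is a minimal, self-contained C sketch of that shape; the opcode encodings and register offsets are invented placeholders, not the real host1x or SE definitions.

#include <stdint.h>
#include <stdio.h>

/* Stand-in opcode encodings -- NOT the real host1x ones. */
#define FAKE_OPCODE_INCR(reg, n)    (0x20000000u | ((uint32_t)(reg) << 16) | (n))
#define FAKE_OPCODE_NONINCR(reg, n) (0x30000000u | ((uint32_t)(reg) << 16) | (n))

static unsigned int prep_cmd(uint32_t *cpuvaddr, uint32_t config, uint32_t crypto_config)
{
	unsigned int i = 0;

	/* Program the config registers as one incrementing burst. */
	cpuvaddr[i++] = FAKE_OPCODE_INCR(0x280 /* config */, 2);
	cpuvaddr[i++] = config;
	cpuvaddr[i++] = crypto_config;

	/* Kick the engine with a single non-incrementing write to "op". */
	cpuvaddr[i++] = FAKE_OPCODE_NONINCR(0x2f0 /* op */, 1);
	cpuvaddr[i++] = 1;

	return i; /* command length in 32-bit words, returned to the caller */
}

int main(void)
{
	uint32_t buf[16];
	unsigned int len = prep_cmd(buf, 0x11, 0x22);

	for (unsigned int j = 0; j < len; j++)
		printf("word %u: %#x\n", j, buf[j]);
	return 0;
}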
268 struct tegra_se *se = ctx->se; in tegra_aes_do_one_req() local
284 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size, in tegra_aes_do_one_req()
297 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key1, in tegra_aes_do_one_req()
307 ret = tegra_key_submit_reserved_xts(ctx->se, ctx->key2, in tegra_aes_do_one_req()
318 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_aes_do_one_req()
326 dma_free_coherent(ctx->se->dev, rctx->datbuf.size, in tegra_aes_do_one_req()
330 tegra_key_invalidate_reserved(ctx->se, key1_id, ctx->alg); in tegra_aes_do_one_req()
333 tegra_key_invalidate_reserved(ctx->se, key2_id, ctx->alg); in tegra_aes_do_one_req()
336 crypto_finalize_skcipher_request(se->engine, req, ret); in tegra_aes_do_one_req()
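Note: tegra_aes_do_one_req() shows the usual engine-worker shape visible in the hits above: allocate a DMA bounce buffer, submit the command buffer, free the buffer, invalidate any reserved key slots, and finalize the request with the crypto engine. A rough user-space sketch of that control flow, with malloc()/free() standing in for dma_alloc_coherent()/dma_free_coherent() and submit_job() as a hypothetical stand-in for tegra_se_host1x_submit():

#include <stdlib.h>
#include <stdio.h>
#include <errno.h>

static int submit_job(void *buf, size_t len)
{
	(void)buf; (void)len;	/* pretend the hardware consumed the buffer */
	return 0;
}

static int do_one_req(size_t datalen)
{
	void *datbuf = malloc(datalen);	/* dma_alloc_coherent() in the driver */
	int ret;

	if (!datbuf)
		return -ENOMEM;

	ret = submit_job(datbuf, datalen);

	free(datbuf);	/* dma_free_coherent() */
	printf("request finalized with status %d\n", ret);	/* crypto_finalize_*_request() */
	return ret;
}

int main(void)
{
	return do_one_req(4096);
}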
354 ctx->se = se_alg->se_dev; in tegra_aes_cra_init()
362 dev_err(ctx->se->dev, "invalid algorithm\n"); in tegra_aes_cra_init()
376 tegra_key_invalidate(ctx->se, ctx->key1_id, ctx->alg); in tegra_aes_cra_exit()
379 tegra_key_invalidate(ctx->se, ctx->key2_id, ctx->alg); in tegra_aes_cra_exit()
389 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_aes_setkey()
393 ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key1_id); in tegra_aes_setkey()
411 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_xts_setkey()
415 ret = tegra_key_submit(ctx->se, key, len, in tegra_xts_setkey()
422 ret = tegra_key_submit(ctx->se, key + len, len, in tegra_xts_setkey()
490 dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen); in tegra_aes_crypt()
494 dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen); in tegra_aes_crypt()
503 return crypto_transfer_skcipher_request_to_engine(ctx->se->engine, req); in tegra_aes_crypt()
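Note: the setkey hits make the length handling visible: tegra_aes_setkey() rejects anything that is not a valid AES key size, and tegra_xts_setkey() programs the supplied key as two halves (key and key + len in separate submissions). An illustrative check, with helper names that are not the driver's:

#include <stdio.h>
#include <errno.h>

static int aes_check_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16: case 24: case 32:	/* AES-128/192/256 */
		return 0;
	default:
		return -EINVAL;
	}
}

static int xts_check_keylen(unsigned int keylen)
{
	/* XTS carries two equal-sized AES keys back to back. */
	if (keylen % 2)
		return -EINVAL;
	return aes_check_keylen(keylen / 2);
}

int main(void)
{
	printf("aes 32 -> %d, xts 64 -> %d, aes 20 -> %d\n",
	       aes_check_keylen(32), xts_check_keylen(64), aes_check_keylen(20));
	return 0;
}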
610 struct tegra_se *se = ctx->se; in tegra_gmac_prep_cmd() local
611 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_gmac_prep_cmd()
623 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_gmac_prep_cmd()
627 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 4); in tegra_gmac_prep_cmd()
634 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_gmac_prep_cmd()
641 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_gmac_prep_cmd()
650 struct tegra_se *se = ctx->se; in tegra_gcm_crypt_prep_cmd() local
651 u32 *cpuvaddr = se->cmdbuf->addr, op; in tegra_gcm_crypt_prep_cmd()
673 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_gcm_crypt_prep_cmd()
677 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_gcm_crypt_prep_cmd()
681 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_gcm_crypt_prep_cmd()
695 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_gcm_crypt_prep_cmd()
700 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_gcm_crypt_prep_cmd()
702 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config); in tegra_gcm_crypt_prep_cmd()
706 static int tegra_gcm_prep_final_cmd(struct tegra_se *se, u32 *cpuvaddr, in tegra_gcm_prep_final_cmd() argument
721 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->aad_len, 2); in tegra_gcm_prep_final_cmd()
725 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->cryp_msg_len, 2); in tegra_gcm_prep_final_cmd()
730 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_gcm_prep_final_cmd()
734 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_gcm_prep_final_cmd()
745 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_gcm_prep_final_cmd()
750 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_gcm_prep_final_cmd()
752 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config); in tegra_gcm_prep_final_cmd()
759 struct tegra_se *se = ctx->se; in tegra_gcm_do_gmac() local
771 return tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_gcm_do_gmac()
776 struct tegra_se *se = ctx->se; in tegra_gcm_do_crypt() local
788 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_gcm_do_crypt()
801 struct tegra_se *se = ctx->se; in tegra_gcm_do_final() local
802 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_gcm_do_final()
810 cmdlen = tegra_gcm_prep_final_cmd(se, cpuvaddr, rctx); in tegra_gcm_do_final()
811 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_gcm_do_final()
825 static int tegra_gcm_do_verify(struct tegra_se *se, struct tegra_aead_reqctx *rctx) in tegra_gcm_do_verify() argument
857 struct tegra_se *se = ctx->se; in tegra_cbcmac_prep_cmd() local
858 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_cbcmac_prep_cmd()
862 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_cbcmac_prep_cmd()
865 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_cbcmac_prep_cmd()
877 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_cbcmac_prep_cmd()
883 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_cbcmac_prep_cmd()
892 struct tegra_se *se = ctx->se; in tegra_ctr_prep_cmd() local
893 u32 *cpuvaddr = se->cmdbuf->addr; in tegra_ctr_prep_cmd()
896 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_ctr_prep_cmd()
900 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_ctr_prep_cmd()
902 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_ctr_prep_cmd()
916 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_ctr_prep_cmd()
922 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_ctr_prep_cmd()
924 dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", in tegra_ctr_prep_cmd()
932 struct tegra_se *se = ctx->se; in tegra_ccm_do_cbcmac() local
943 return tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_ccm_do_cbcmac()
1049 static int tegra_ccm_mac_result(struct tegra_se *se, struct tegra_aead_reqctx *rctx) in tegra_ccm_mac_result() argument
1056 result[i] = readl(se->base + se->hw->regs->result + (i * 4)); in tegra_ccm_mac_result()
1059 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_ccm_mac_result()
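Note: tegra_ccm_mac_result() reads the computed MAC out of consecutive 32-bit result registers and then clears them, which is why the same result offset appears with both readl() and writel(0, ...) above. A small sketch of that read-and-clear pattern, with a plain array standing in for the MMIO result registers:

#include <stdint.h>
#include <stdio.h>

#define RESULT_WORDS 4	/* a 128-bit tag */

static uint32_t fake_result_regs[RESULT_WORDS] = { 0xdeadbeef, 0x01020304, 0xa5a5a5a5, 0x5a5a5a5a };

static void read_and_clear_result(uint32_t *out)
{
	for (int i = 0; i < RESULT_WORDS; i++) {
		out[i] = fake_result_regs[i];	/* readl(se->base + result + i * 4) */
		fake_result_regs[i] = 0;	/* writel(0, ...) so no tag lingers in hardware */
	}
}

int main(void)
{
	uint32_t mac[RESULT_WORDS];

	read_and_clear_result(mac);
	for (int i = 0; i < RESULT_WORDS; i++)
		printf("mac[%d] = %#010x\n", i, mac[i]);
	return 0;
}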
1072 static int tegra_ccm_ctr_result(struct tegra_se *se, struct tegra_aead_reqctx *rctx) in tegra_ccm_ctr_result() argument
1090 struct tegra_se *se = ctx->se; in tegra_ccm_compute_auth() local
1113 return tegra_ccm_mac_result(se, rctx); in tegra_ccm_compute_auth()
1118 struct tegra_se *se = ctx->se; in tegra_ccm_do_ctr() local
1150 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_ccm_do_ctr()
1154 return tegra_ccm_ctr_result(se, rctx); in tegra_ccm_do_ctr()
1157 static int tegra_ccm_crypt_init(struct aead_request *req, struct tegra_se *se, in tegra_ccm_crypt_init() argument
1188 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_ccm_crypt_init()
1199 struct tegra_se *se = ctx->se; in tegra_ccm_do_one_req() local
1202 ret = tegra_ccm_crypt_init(req, se, rctx); in tegra_ccm_do_one_req()
1210 rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_ccm_do_one_req()
1216 rctx->outbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_ccm_do_one_req()
1224 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, in tegra_ccm_do_one_req()
1253 dma_free_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_ccm_do_one_req()
1257 dma_free_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_ccm_do_one_req()
1261 tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); in tegra_ccm_do_one_req()
1264 crypto_finalize_aead_request(ctx->se->engine, req, ret); in tegra_ccm_do_one_req()
1294 rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_gcm_do_one_req()
1302 rctx->outbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_gcm_do_one_req()
1310 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, in tegra_gcm_do_one_req()
1336 ret = tegra_gcm_do_verify(ctx->se, rctx); in tegra_gcm_do_one_req()
1339 dma_free_coherent(ctx->se->dev, rctx->outbuf.size, in tegra_gcm_do_one_req()
1343 dma_free_coherent(ctx->se->dev, rctx->inbuf.size, in tegra_gcm_do_one_req()
1347 tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); in tegra_gcm_do_one_req()
1350 crypto_finalize_aead_request(ctx->se->engine, req, ret); in tegra_gcm_do_one_req()
1369 ctx->se = se_alg->se_dev; in tegra_aead_cra_init()
1375 dev_err(ctx->se->dev, "invalid algorithm\n"); in tegra_aead_cra_init()
1425 tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg); in tegra_aead_cra_exit()
1436 return crypto_transfer_aead_request_to_engine(ctx->se->engine, req); in tegra_aead_crypt()
1456 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_aead_setkey()
1460 ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); in tegra_aead_setkey()
1473 struct tegra_se *se = ctx->se; in tegra_cmac_prep_cmd() local
1474 u32 *cpuvaddr = se->cmdbuf->addr, op; in tegra_cmac_prep_cmd()
1492 cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr); in tegra_cmac_prep_cmd()
1498 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1); in tegra_cmac_prep_cmd()
1502 cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6); in tegra_cmac_prep_cmd()
1513 cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1); in tegra_cmac_prep_cmd()
1518 host1x_uclass_incr_syncpt_indx_f(se->syncpt_id); in tegra_cmac_prep_cmd()
1523 static void tegra_cmac_copy_result(struct tegra_se *se, struct tegra_cmac_reqctx *rctx) in tegra_cmac_copy_result() argument
1528 rctx->result[i] = readl(se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_copy_result()
1531 static void tegra_cmac_paste_result(struct tegra_se *se, struct tegra_cmac_reqctx *rctx) in tegra_cmac_paste_result() argument
1537 se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_paste_result()
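Note: tegra_cmac_copy_result() and tegra_cmac_paste_result() exist because a multi-part CMAC spans several submissions: the intermediate MAC state is copied out of the result registers after one update and written back before the next, so the engine resumes from the saved state. A minimal model of that save/restore, again with an array in place of the MMIO registers:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define STATE_WORDS 4

static uint32_t hw_result[STATE_WORDS];	/* pretend MMIO result registers */

static void copy_result(uint32_t *saved)	/* after an update completes */
{
	memcpy(saved, hw_result, sizeof(hw_result));
}

static void paste_result(const uint32_t *saved)	/* before the next update starts */
{
	memcpy(hw_result, saved, sizeof(hw_result));
}

int main(void)
{
	uint32_t saved[STATE_WORDS];

	hw_result[0] = 0x1234;	/* state left by the previous block */
	copy_result(saved);
	memset(hw_result, 0, sizeof(hw_result));	/* another request used the engine */
	paste_result(saved);	/* restore before continuing this CMAC */
	printf("restored word 0: %#x\n", hw_result[0]);
	return 0;
}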
1545 struct tegra_se *se = ctx->se; in tegra_cmac_do_init() local
1555 rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size * 2, in tegra_cmac_do_init()
1564 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_do_init()
1574 struct tegra_se *se = ctx->se; in tegra_cmac_do_update() local
1610 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size, in tegra_cmac_do_update()
1633 tegra_cmac_paste_result(ctx->se, rctx); in tegra_cmac_do_update()
1636 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_cmac_do_update()
1638 tegra_cmac_copy_result(ctx->se, rctx); in tegra_cmac_do_update()
1640 dma_free_coherent(ctx->se->dev, rctx->datbuf.size, in tegra_cmac_do_update()
1651 struct tegra_se *se = ctx->se; in tegra_cmac_do_final() local
1661 rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size, in tegra_cmac_do_final()
1680 tegra_cmac_paste_result(ctx->se, rctx); in tegra_cmac_do_final()
1684 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); in tegra_cmac_do_final()
1690 result[i] = readl(se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_do_final()
1693 writel(0, se->base + se->hw->regs->result + (i * 4)); in tegra_cmac_do_final()
1697 dma_free_coherent(se->dev, rctx->datbuf.size, in tegra_cmac_do_final()
1700 dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm) * 2, in tegra_cmac_do_final()
1711 struct tegra_se *se = ctx->se; in tegra_cmac_do_one_req() local
1723 ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, in tegra_cmac_do_one_req()
1746 tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); in tegra_cmac_do_one_req()
1748 crypto_finalize_hash_request(se->engine, req, ret); in tegra_cmac_do_one_req()
1761 dev_warn(ctx->se->dev, "failed to allocate fallback for %s\n", algname); in tegra_cmac_init_fallback()
1786 ctx->se = se_alg->se_dev; in tegra_cmac_cra_init()
1792 dev_err(ctx->se->dev, "invalid algorithm\n"); in tegra_cmac_cra_init()
1810 tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg); in tegra_cmac_cra_exit()
1820 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); in tegra_cmac_setkey()
1827 ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); in tegra_cmac_setkey()
1844 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_init()
1855 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_update()
1866 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_final()
1877 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_finup()
1888 return crypto_transfer_hash_request_to_engine(ctx->se->engine, req); in tegra_cmac_digest()
1972 .cra_driver_name = "tegra-se-cmac",
1986 int tegra_init_aes(struct tegra_se *se) in tegra_init_aes() argument
1993 se->manifest = tegra_aes_kac_manifest; in tegra_init_aes()
1997 tegra_aes_algs[i].se_dev = se; in tegra_init_aes()
2001 dev_err(se->dev, "failed to register %s\n", in tegra_init_aes()
2009 tegra_aead_algs[i].se_dev = se; in tegra_init_aes()
2013 dev_err(se->dev, "failed to register %s\n", in tegra_init_aes()
2021 tegra_cmac_algs[i].se_dev = se; in tegra_init_aes()
2025 dev_err(se->dev, "failed to register %s\n", in tegra_init_aes()
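Note: per the hits above, tegra_init_aes() binds each algorithm descriptor to the SE device (se_dev = se), registers it, and on failure logs "failed to register %s" and unwinds whatever was already registered. A compact sketch of that register-or-roll-back loop; register_alg()/unregister_alg() and the sample names are stand-ins, not the driver's real registration helpers:

#include <stdio.h>

struct fake_alg {
	const char *name;
	void *se_dev;
};

static int register_alg(struct fake_alg *alg)
{
	printf("registered %s\n", alg->name);
	return 0;
}

static void unregister_alg(struct fake_alg *alg)
{
	printf("unregistered %s\n", alg->name);
}

static int init_algs(struct fake_alg *algs, int count, void *se)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		algs[i].se_dev = se;	/* bind the descriptor to this SE instance */
		ret = register_alg(&algs[i]);
		if (ret) {
			fprintf(stderr, "failed to register %s\n", algs[i].name);
			goto unwind;
		}
	}
	return 0;

unwind:
	while (--i >= 0)	/* roll back in reverse order */
		unregister_alg(&algs[i]);
	return ret;
}

int main(void)
{
	struct fake_alg algs[] = { { "ecb(aes)" }, { "cbc(aes)" }, { "xts(aes)" } };
	int se_placeholder;

	return init_algs(algs, (int)(sizeof(algs) / sizeof(algs[0])), &se_placeholder);
}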
2050 void tegra_deinit_aes(struct tegra_se *se) in tegra_deinit_aes() argument