Lines Matching +full:p +full:- +full:384

1 // SPDX-License-Identifier: GPL-2.0
14 #include <linux/dma-mapping.h>
33 #define HPRE_INVLD_REQ_ID (-1)
66 /* low address: e--->n */
70 /* low address: d--->n */
74 /* low address: dq->dp->q->p->qinv */
84 * ya = g^xa mod p; [RFC2631 sec 2.1.1]
87 * ZZ = yb^xa mod p; [RFC2631 sec 2.1.1]
88 * low address: d--->n, please refer to Hisilicon HPRE UM
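
The RFC 2631 relations quoted above can be sanity-checked with toy numbers; the standalone sketch below is purely illustrative (modexp() and the tiny parameters are made up here, nothing in it is driver code):

#include <stdio.h>

/* Square-and-multiply modular exponentiation, small-number toy only. */
static unsigned long modexp(unsigned long base, unsigned long exp, unsigned long mod)
{
	unsigned long r = 1;

	base %= mod;
	while (exp) {
		if (exp & 1)
			r = (r * base) % mod;
		base = (base * base) % mod;
		exp >>= 1;
	}
	return r;
}

int main(void)
{
	unsigned long p = 23, g = 5, xa = 6, xb = 15;
	unsigned long ya = modexp(g, xa, p);	/* ya = g^xa mod p = 8  */
	unsigned long yb = modexp(g, xb, p);	/* yb = g^xb mod p = 19 */

	/* Both sides derive the same ZZ = yb^xa mod p = ya^xb mod p = 2. */
	printf("ZZ = %lu = %lu\n", modexp(yb, xa, p), modexp(ya, xb, p));
	return 0;
}
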
98 /* low address: p->a->k->b */
99 unsigned char *p; member
102 /* low address: x->y */
108 /* low address: p->a->k */
109 unsigned char *p; member
155 return ((crypto_dma_align() - 1) | (HPRE_ALIGN_SZ - 1)) + 1; in hpre_align_sz()
160 return (hpre_align_sz() - 1) & ~(crypto_tfm_ctx_alignment() - 1); in hpre_align_pd()
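
hpre_align_sz() and hpre_align_pd() above combine the DMA and HPRE alignment requirements; for powers of two, OR-ing the (alignment - 1) masks and adding one yields the larger alignment. A minimal sketch with assumed values (64-byte DMA alignment, 64-byte HPRE_ALIGN_SZ, 8-byte crypto_tfm_ctx_alignment(), all hypothetical):

#include <stdio.h>

int main(void)
{
	/* Assumed example values; the real ones come from crypto_dma_align(),
	 * HPRE_ALIGN_SZ and crypto_tfm_ctx_alignment().
	 */
	unsigned int dma_align = 64, hpre_align = 64, ctx_align = 8;

	/* ((64 - 1) | (64 - 1)) + 1 == 64; with 128 and 64 it would give 128,
	 * i.e. the larger of two power-of-two alignments (hpre_align_sz()).
	 */
	unsigned int align_sz = ((dma_align - 1) | (hpre_align - 1)) + 1;

	/* Padding beyond the crypto API's own context alignment
	 * (hpre_align_pd()); here 63 & ~7 == 56.
	 */
	unsigned int align_pd = (align_sz - 1) & ~(ctx_align - 1);

	printf("align_sz=%u align_pd=%u\n", align_sz, align_pd);
	return 0;
}
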
168 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_alloc_req_id()
169 id = idr_alloc(&ctx->req_idr, NULL, 0, ctx->qp->sq_depth, GFP_ATOMIC); in hpre_alloc_req_id()
170 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_alloc_req_id()
179 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_free_req_id()
180 idr_remove(&ctx->req_idr, req_id); in hpre_free_req_id()
181 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_free_req_id()
190 ctx = hpre_req->ctx; in hpre_add_req_to_ctx()
193 return -EINVAL; in hpre_add_req_to_ctx()
195 ctx->req_list[id] = hpre_req; in hpre_add_req_to_ctx()
196 hpre_req->req_id = id; in hpre_add_req_to_ctx()
198 dfx = ctx->hpre->debug.dfx; in hpre_add_req_to_ctx()
200 ktime_get_ts64(&hpre_req->req_time); in hpre_add_req_to_ctx()
207 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_rm_req_from_ctx()
208 int id = hpre_req->req_id; in hpre_rm_req_from_ctx()
210 if (hpre_req->req_id >= 0) { in hpre_rm_req_from_ctx()
211 hpre_req->req_id = HPRE_INVLD_REQ_ID; in hpre_rm_req_from_ctx()
212 ctx->req_list[id] = NULL; in hpre_rm_req_from_ctx()
225 return ERR_PTR(-ENODEV); in hpre_get_qp_and_start()
231 pci_err(qp->qm->pdev, "Can not start qp!\n"); in hpre_get_qp_and_start()
232 return ERR_PTR(-EINVAL); in hpre_get_qp_and_start()
242 struct device *dev = hpre_req->ctx->dev; in hpre_get_data_dma_addr()
246 hpre_req->src = NULL; in hpre_get_data_dma_addr()
249 hpre_req->dst = NULL; in hpre_get_data_dma_addr()
255 return -ENOMEM; in hpre_get_data_dma_addr()
265 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_prepare_dma_buf()
266 struct device *dev = ctx->dev; in hpre_prepare_dma_buf()
270 shift = ctx->key_sz - len; in hpre_prepare_dma_buf()
272 return -EINVAL; in hpre_prepare_dma_buf()
274 ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC); in hpre_prepare_dma_buf()
276 return -ENOMEM; in hpre_prepare_dma_buf()
280 hpre_req->src = ptr; in hpre_prepare_dma_buf()
282 hpre_req->dst = ptr; in hpre_prepare_dma_buf()
292 struct hpre_sqe *msg = &hpre_req->req; in hpre_hw_data_init()
293 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_hw_data_init()
298 if ((sg_is_last(data) && len == ctx->key_sz) && in hpre_hw_data_init()
308 msg->in = cpu_to_le64(tmp); in hpre_hw_data_init()
310 msg->out = cpu_to_le64(tmp); in hpre_hw_data_init()
320 struct device *dev = ctx->dev; in hpre_hw_data_clr_all()
321 struct hpre_sqe *sqe = &req->req; in hpre_hw_data_clr_all()
324 tmp = le64_to_cpu(sqe->in); in hpre_hw_data_clr_all()
329 if (req->src) in hpre_hw_data_clr_all()
330 dma_free_coherent(dev, ctx->key_sz, req->src, tmp); in hpre_hw_data_clr_all()
332 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE); in hpre_hw_data_clr_all()
335 tmp = le64_to_cpu(sqe->out); in hpre_hw_data_clr_all()
339 if (req->dst) { in hpre_hw_data_clr_all()
341 scatterwalk_map_and_copy(req->dst, dst, 0, in hpre_hw_data_clr_all()
342 ctx->key_sz, 1); in hpre_hw_data_clr_all()
343 dma_free_coherent(dev, ctx->key_sz, req->dst, tmp); in hpre_hw_data_clr_all()
345 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE); in hpre_hw_data_clr_all()
361 id = (int)le16_to_cpu(sqe->tag); in hpre_alg_res_post_hf()
362 req = ctx->req_list[id]; in hpre_alg_res_post_hf()
366 err = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_ALG_BITS) & in hpre_alg_res_post_hf()
369 done = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_DONE_SHIFT) & in hpre_alg_res_post_hf()
375 alg = le32_to_cpu(sqe->dw0) & HREE_ALG_TYPE_MASK; in hpre_alg_res_post_hf()
376 dev_err_ratelimited(ctx->dev, "alg[0x%x] error: done[0x%x], etype[0x%x]\n", in hpre_alg_res_post_hf()
379 return -EINVAL; in hpre_alg_res_post_hf()
387 return -EINVAL; in hpre_ctx_set()
389 spin_lock_init(&ctx->req_lock); in hpre_ctx_set()
390 ctx->qp = qp; in hpre_ctx_set()
391 ctx->dev = &qp->qm->pdev->dev; in hpre_ctx_set()
393 hpre = container_of(ctx->qp->qm, struct hpre, qm); in hpre_ctx_set()
394 ctx->hpre = hpre; in hpre_ctx_set()
395 ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL); in hpre_ctx_set()
396 if (!ctx->req_list) in hpre_ctx_set()
397 return -ENOMEM; in hpre_ctx_set()
398 ctx->key_sz = 0; in hpre_ctx_set()
399 ctx->crt_g2_mode = false; in hpre_ctx_set()
400 idr_init(&ctx->req_idr); in hpre_ctx_set()
408 idr_destroy(&ctx->req_idr); in hpre_ctx_clear()
409 kfree(ctx->req_list); in hpre_ctx_clear()
410 hisi_qm_free_qps(&ctx->qp, 1); in hpre_ctx_clear()
413 ctx->crt_g2_mode = false; in hpre_ctx_clear()
414 ctx->key_sz = 0; in hpre_ctx_clear()
424 time_use_us = (reply_time.tv_sec - req->req_time.tv_sec) * in hpre_is_bd_timeout()
426 (reply_time.tv_nsec - req->req_time.tv_nsec) / in hpre_is_bd_timeout()
437 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_dh_cb()
444 areq = req->areq.dh; in hpre_dh_cb()
445 areq->dst_len = ctx->key_sz; in hpre_dh_cb()
451 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_dh_cb()
458 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_rsa_cb()
470 areq = req->areq.rsa; in hpre_rsa_cb()
471 areq->dst_len = ctx->key_sz; in hpre_rsa_cb()
472 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_rsa_cb()
479 struct hpre_ctx *ctx = qp->qp_ctx; in hpre_alg_cb()
480 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_alg_cb()
482 struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)]; in hpre_alg_cb()
489 req->cb(ctx, resp); in hpre_alg_cb()
507 qp->qp_ctx = ctx; in hpre_ctx_init()
508 qp->req_cb = hpre_alg_cb; in hpre_ctx_init()
510 ret = hpre_ctx_set(ctx, qp, qp->sq_depth); in hpre_ctx_init()
527 if (akreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
528 akreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
529 return -EOVERFLOW; in hpre_msg_request_set()
534 h_req->cb = hpre_rsa_cb; in hpre_msg_request_set()
535 h_req->areq.rsa = akreq; in hpre_msg_request_set()
536 msg = &h_req->req; in hpre_msg_request_set()
541 if (kreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
542 kreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
543 return -EOVERFLOW; in hpre_msg_request_set()
548 h_req->cb = hpre_dh_cb; in hpre_msg_request_set()
549 h_req->areq.dh = kreq; in hpre_msg_request_set()
550 msg = &h_req->req; in hpre_msg_request_set()
552 msg->key = cpu_to_le64(ctx->dh.dma_xa_p); in hpre_msg_request_set()
555 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
556 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
557 msg->dw0 |= cpu_to_le32(0x1 << HPRE_SQE_DONE_SHIFT); in hpre_msg_request_set()
558 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_msg_request_set()
559 h_req->ctx = ctx; in hpre_msg_request_set()
563 return -EBUSY; in hpre_msg_request_set()
565 msg->tag = cpu_to_le16((u16)req_id); in hpre_msg_request_set()
572 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_send()
578 spin_lock_bh(&ctx->req_lock); in hpre_send()
579 ret = hisi_qp_send(ctx->qp, msg); in hpre_send()
580 spin_unlock_bh(&ctx->req_lock); in hpre_send()
581 if (ret != -EBUSY) in hpre_send()
589 if (ret != -EBUSY) in hpre_send()
601 struct hpre_sqe *msg = &hpre_req->req; in hpre_dh_compute_value()
608 if (req->src) { in hpre_dh_compute_value()
609 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 1); in hpre_dh_compute_value()
613 msg->in = cpu_to_le64(ctx->dh.dma_g); in hpre_dh_compute_value()
616 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1); in hpre_dh_compute_value()
620 if (ctx->crt_g2_mode && !req->src) in hpre_dh_compute_value()
621 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH_G2); in hpre_dh_compute_value()
623 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH); in hpre_dh_compute_value()
628 return -EINPROGRESS; in hpre_dh_compute_value()
632 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_dh_compute_value()
654 return -EINVAL; in hpre_is_dh_params_length_valid()
660 struct device *dev = ctx->dev; in hpre_dh_set_params()
663 if (params->p_size > HPRE_DH_MAX_P_SZ) in hpre_dh_set_params()
664 return -EINVAL; in hpre_dh_set_params()
666 if (hpre_is_dh_params_length_valid(params->p_size << in hpre_dh_set_params()
668 return -EINVAL; in hpre_dh_set_params()
670 sz = ctx->key_sz = params->p_size; in hpre_dh_set_params()
671 ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1, in hpre_dh_set_params()
672 &ctx->dh.dma_xa_p, GFP_KERNEL); in hpre_dh_set_params()
673 if (!ctx->dh.xa_p) in hpre_dh_set_params()
674 return -ENOMEM; in hpre_dh_set_params()
676 memcpy(ctx->dh.xa_p + sz, params->p, sz); in hpre_dh_set_params()
679 if (params->g_size == 1 && *(char *)params->g == HPRE_DH_G_FLAG) { in hpre_dh_set_params()
680 ctx->crt_g2_mode = true; in hpre_dh_set_params()
684 ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL); in hpre_dh_set_params()
685 if (!ctx->dh.g) { in hpre_dh_set_params()
686 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_set_params()
687 ctx->dh.dma_xa_p); in hpre_dh_set_params()
688 ctx->dh.xa_p = NULL; in hpre_dh_set_params()
689 return -ENOMEM; in hpre_dh_set_params()
692 memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size); in hpre_dh_set_params()
699 struct device *dev = ctx->dev; in hpre_dh_clear_ctx()
700 unsigned int sz = ctx->key_sz; in hpre_dh_clear_ctx()
703 hisi_qm_stop_qp(ctx->qp); in hpre_dh_clear_ctx()
705 if (ctx->dh.g) { in hpre_dh_clear_ctx()
706 dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g); in hpre_dh_clear_ctx()
707 ctx->dh.g = NULL; in hpre_dh_clear_ctx()
710 if (ctx->dh.xa_p) { in hpre_dh_clear_ctx()
711 memzero_explicit(ctx->dh.xa_p, sz); in hpre_dh_clear_ctx()
712 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_clear_ctx()
713 ctx->dh.dma_xa_p); in hpre_dh_clear_ctx()
714 ctx->dh.xa_p = NULL; in hpre_dh_clear_ctx()
728 return -EINVAL; in hpre_dh_set_secret()
737 memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key, in hpre_dh_set_secret()
751 return ctx->key_sz; in hpre_dh_max_size()
774 (*len)--; in hpre_rsa_drop_leading_zeros()
804 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_enc()
808 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_enc()
809 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_enc()
810 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_enc()
816 if (unlikely(!ctx->rsa.pubkey)) in hpre_rsa_enc()
817 return -EINVAL; in hpre_rsa_enc()
823 msg->dw0 |= cpu_to_le32(HPRE_ALG_NC_NCRT); in hpre_rsa_enc()
824 msg->key = cpu_to_le64(ctx->rsa.dma_pubkey); in hpre_rsa_enc()
826 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_enc()
830 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_enc()
837 return -EINPROGRESS; in hpre_rsa_enc()
841 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_enc()
852 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_dec()
856 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_dec()
857 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_dec()
858 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_dec()
864 if (unlikely(!ctx->rsa.prikey)) in hpre_rsa_dec()
865 return -EINVAL; in hpre_rsa_dec()
871 if (ctx->crt_g2_mode) { in hpre_rsa_dec()
872 msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey); in hpre_rsa_dec()
873 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
876 msg->key = cpu_to_le64(ctx->rsa.dma_prikey); in hpre_rsa_dec()
877 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
881 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_dec()
885 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_dec()
892 return -EINPROGRESS; in hpre_rsa_dec()
896 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_dec()
908 ctx->key_sz = vlen; in hpre_rsa_set_n()
911 if (!hpre_rsa_key_size_is_support(ctx->key_sz)) in hpre_rsa_set_n()
914 ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
915 &ctx->rsa.dma_pubkey, in hpre_rsa_set_n()
917 if (!ctx->rsa.pubkey) in hpre_rsa_set_n()
918 return -ENOMEM; in hpre_rsa_set_n()
921 ctx->rsa.prikey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
922 &ctx->rsa.dma_prikey, in hpre_rsa_set_n()
924 if (!ctx->rsa.prikey) { in hpre_rsa_set_n()
925 dma_free_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
926 ctx->rsa.pubkey, in hpre_rsa_set_n()
927 ctx->rsa.dma_pubkey); in hpre_rsa_set_n()
928 ctx->rsa.pubkey = NULL; in hpre_rsa_set_n()
929 return -ENOMEM; in hpre_rsa_set_n()
931 memcpy(ctx->rsa.prikey + vlen, ptr, vlen); in hpre_rsa_set_n()
933 memcpy(ctx->rsa.pubkey + vlen, ptr, vlen); in hpre_rsa_set_n()
946 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_e()
947 return -EINVAL; in hpre_rsa_set_e()
949 memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_e()
961 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_d()
962 return -EINVAL; in hpre_rsa_set_d()
964 memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_d()
977 return -EINVAL; in hpre_crt_para_get()
979 memcpy(para + para_sz - len, ptr, len); in hpre_crt_para_get()
986 unsigned int hlf_ksz = ctx->key_sz >> 1; in hpre_rsa_setkey_crt()
987 struct device *dev = ctx->dev; in hpre_rsa_setkey_crt()
991 ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, in hpre_rsa_setkey_crt()
992 &ctx->rsa.dma_crt_prikey, in hpre_rsa_setkey_crt()
994 if (!ctx->rsa.crt_prikey) in hpre_rsa_setkey_crt()
995 return -ENOMEM; in hpre_rsa_setkey_crt()
997 ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz, in hpre_rsa_setkey_crt()
998 rsa_key->dq, rsa_key->dq_sz); in hpre_rsa_setkey_crt()
1003 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1004 rsa_key->dp, rsa_key->dp_sz); in hpre_rsa_setkey_crt()
1009 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1010 rsa_key->q, rsa_key->q_sz); in hpre_rsa_setkey_crt()
1015 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1016 rsa_key->p, rsa_key->p_sz); in hpre_rsa_setkey_crt()
1021 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1022 rsa_key->qinv, rsa_key->qinv_sz); in hpre_rsa_setkey_crt()
1026 ctx->crt_g2_mode = true; in hpre_rsa_setkey_crt()
1032 memzero_explicit(ctx->rsa.crt_prikey, offset); in hpre_rsa_setkey_crt()
1033 dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey, in hpre_rsa_setkey_crt()
1034 ctx->rsa.dma_crt_prikey); in hpre_rsa_setkey_crt()
1035 ctx->rsa.crt_prikey = NULL; in hpre_rsa_setkey_crt()
1036 ctx->crt_g2_mode = false; in hpre_rsa_setkey_crt()
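
The sequence of hpre_crt_para_get() calls above fills the CRT private key buffer in the order described by the "low address: dq->dp->q->p->qinv" comment near the top of the file, each parameter right-aligned (zero-padded on the left) in a half-key-size slot. Illustrative offsets for an assumed 2048-bit key (hlf_ksz = 128; HPRE_CRT_PRMS is presumed to be 5, one slot per parameter):

/* Illustrative layout only; slot size and parameter order are taken from
 * the listing above, the 2048-bit key size is an assumption.
 */
enum { HLF_KSZ = 128 };

enum {
	OFF_DQ   = 0 * HLF_KSZ,	/*   0 */
	OFF_DP   = 1 * HLF_KSZ,	/* 128 */
	OFF_Q    = 2 * HLF_KSZ,	/* 256 */
	OFF_P    = 3 * HLF_KSZ,	/* 384 */
	OFF_QINV = 4 * HLF_KSZ,	/* 512 */
	CRT_BUF  = 5 * HLF_KSZ,	/* 640 bytes total = hlf_ksz * HPRE_CRT_PRMS */
};
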
1044 unsigned int half_key_sz = ctx->key_sz >> 1; in hpre_rsa_clear_ctx()
1045 struct device *dev = ctx->dev; in hpre_rsa_clear_ctx()
1048 hisi_qm_stop_qp(ctx->qp); in hpre_rsa_clear_ctx()
1050 if (ctx->rsa.pubkey) { in hpre_rsa_clear_ctx()
1051 dma_free_coherent(dev, ctx->key_sz << 1, in hpre_rsa_clear_ctx()
1052 ctx->rsa.pubkey, ctx->rsa.dma_pubkey); in hpre_rsa_clear_ctx()
1053 ctx->rsa.pubkey = NULL; in hpre_rsa_clear_ctx()
1056 if (ctx->rsa.crt_prikey) { in hpre_rsa_clear_ctx()
1057 memzero_explicit(ctx->rsa.crt_prikey, in hpre_rsa_clear_ctx()
1060 ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey); in hpre_rsa_clear_ctx()
1061 ctx->rsa.crt_prikey = NULL; in hpre_rsa_clear_ctx()
1064 if (ctx->rsa.prikey) { in hpre_rsa_clear_ctx()
1065 memzero_explicit(ctx->rsa.prikey, ctx->key_sz); in hpre_rsa_clear_ctx()
1066 dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey, in hpre_rsa_clear_ctx()
1067 ctx->rsa.dma_prikey); in hpre_rsa_clear_ctx()
1068 ctx->rsa.prikey = NULL; in hpre_rsa_clear_ctx()
1076	 * CRT: return true, N-CRT: return false.
1080 u16 len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz + in hpre_is_crt_key()
1081 key->qinv_sz; in hpre_is_crt_key()
1085 /* N-CRT less than 5 parameters */ in hpre_is_crt_key()
1124 if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) { in hpre_rsa_setkey()
1125 ret = -EINVAL; in hpre_rsa_setkey()
1142 ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setpubkey()
1155 ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setprivkey()
1167 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_max_size()
1168 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) in hpre_rsa_max_size()
1169 return crypto_akcipher_maxsize(ctx->rsa.soft_tfm); in hpre_rsa_max_size()
1171 return ctx->key_sz; in hpre_rsa_max_size()
1179 ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0); in hpre_rsa_init_tfm()
1180 if (IS_ERR(ctx->rsa.soft_tfm)) { in hpre_rsa_init_tfm()
1182 return PTR_ERR(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1190 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1200 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_exit_tfm()
1208 j = len - i - 1; in hpre_key_to_big_end()
1216 struct device *dev = ctx->dev; in hpre_ecc_clear_ctx()
1217 unsigned int sz = ctx->key_sz; in hpre_ecc_clear_ctx()
1221 hisi_qm_stop_qp(ctx->qp); in hpre_ecc_clear_ctx()
1223 if (is_ecdh && ctx->ecdh.p) { in hpre_ecc_clear_ctx()
1224 /* ecdh: p->a->k->b */ in hpre_ecc_clear_ctx()
1225 memzero_explicit(ctx->ecdh.p + shift, sz); in hpre_ecc_clear_ctx()
1226 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecc_clear_ctx()
1227 ctx->ecdh.p = NULL; in hpre_ecc_clear_ctx()
1228 } else if (!is_ecdh && ctx->curve25519.p) { in hpre_ecc_clear_ctx()
1229 /* curve25519: p->a->k */ in hpre_ecc_clear_ctx()
1230 memzero_explicit(ctx->curve25519.p + shift, sz); in hpre_ecc_clear_ctx()
1231 dma_free_coherent(dev, sz << 2, ctx->curve25519.p, in hpre_ecc_clear_ctx()
1232 ctx->curve25519.dma_p); in hpre_ecc_clear_ctx()
1233 ctx->curve25519.p = NULL; in hpre_ecc_clear_ctx()
1240 * The bits of 192/224/256/384/521 are supported by HPRE,
1242 * bits<=256, bits=256; 256<bits<=384, bits=384; 384<bits<=576, bits=576;
1244 * high-order zeros by soft, so TASK_LENGTH1 is 0x3/0x5/0x8;
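
The comment above describes how ECC key widths are rounded up before being handed to the engine. A minimal sketch of that rounding and of the resulting TASK_LENGTH1 value follows, assuming HPRE_BITS_2_BYTES_SHIFT is 3; the rounded sizes of 32/48/72 bytes then give exactly the 0x3/0x5/0x8 mentioned in the comment:

/* Sketch only; the helper names and the shift value 3 are assumptions. */
static unsigned int hpre_round_key_bits(unsigned int bits)
{
	if (bits <= 256)
		return 256;
	if (bits <= 384)
		return 384;
	return 576;	/* 384 < bits <= 576, covers P-521 */
}

/* key_sz in bytes is 32, 48 or 72; (key_sz >> 3) - 1 gives 0x3, 0x5, 0x8,
 * matching msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1
 * seen elsewhere in this listing.
 */
static unsigned int hpre_task_len1(unsigned int key_sz)
{
	return (key_sz >> 3) - 1;
}
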
1263 unsigned int sz = cur_sz - (ndigits - 1) * sizeof(u64); in fill_curve_param()
1266 while (i < ndigits - 1) { in fill_curve_param()
1271 memcpy(addr + sizeof(u64) * i, &param[ndigits - 1], sz); in fill_curve_param()
1278 unsigned int shifta = ctx->key_sz << 1; in hpre_ecdh_fill_curve()
1279 unsigned int shiftb = ctx->key_sz << 2; in hpre_ecdh_fill_curve()
1280 void *p = ctx->ecdh.p + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve() local
1281 void *a = ctx->ecdh.p + shifta - cur_sz; in hpre_ecdh_fill_curve()
1282 void *b = ctx->ecdh.p + shiftb - cur_sz; in hpre_ecdh_fill_curve()
1283 void *x = ctx->ecdh.g + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve()
1284 void *y = ctx->ecdh.g + shifta - cur_sz; in hpre_ecdh_fill_curve()
1285 const struct ecc_curve *curve = ecc_get_curve(ctx->curve_id); in hpre_ecdh_fill_curve()
1289 return -EINVAL; in hpre_ecdh_fill_curve()
1291 n = kzalloc(ctx->key_sz, GFP_KERNEL); in hpre_ecdh_fill_curve()
1293 return -ENOMEM; in hpre_ecdh_fill_curve()
1295 fill_curve_param(p, curve->p, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1296 fill_curve_param(a, curve->a, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1297 fill_curve_param(b, curve->b, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1298 fill_curve_param(x, curve->g.x, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1299 fill_curve_param(y, curve->g.y, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1300 fill_curve_param(n, curve->n, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1302 if (params->key_size == cur_sz && memcmp(params->key, n, cur_sz) >= 0) { in hpre_ecdh_fill_curve()
1304 return -EINVAL; in hpre_ecdh_fill_curve()
1329 struct device *dev = ctx->dev; in hpre_ecdh_set_param()
1333 ctx->key_sz = hpre_ecdh_supported_curve(ctx->curve_id); in hpre_ecdh_set_param()
1334 if (!ctx->key_sz) in hpre_ecdh_set_param()
1335 return -EINVAL; in hpre_ecdh_set_param()
1337 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_param()
1338 if (!curve_sz || params->key_size > curve_sz) in hpre_ecdh_set_param()
1339 return -EINVAL; in hpre_ecdh_set_param()
1341 sz = ctx->key_sz; in hpre_ecdh_set_param()
1343 if (!ctx->ecdh.p) { in hpre_ecdh_set_param()
1344 ctx->ecdh.p = dma_alloc_coherent(dev, sz << 3, &ctx->ecdh.dma_p, in hpre_ecdh_set_param()
1346 if (!ctx->ecdh.p) in hpre_ecdh_set_param()
1347 return -ENOMEM; in hpre_ecdh_set_param()
1351 ctx->ecdh.g = ctx->ecdh.p + shift; in hpre_ecdh_set_param()
1352 ctx->ecdh.dma_g = ctx->ecdh.dma_p + shift; in hpre_ecdh_set_param()
1357 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecdh_set_param()
1358 ctx->ecdh.p = NULL; in hpre_ecdh_set_param()
1378 struct device *dev = ctx->dev; in ecdh_gen_privkey()
1387 ret = crypto_rng_get_bytes(crypto_default_rng, (u8 *)params->key, in ecdh_gen_privkey()
1388 params->key_size); in ecdh_gen_privkey()
1401 struct device *dev = ctx->dev; in hpre_ecdh_set_secret()
1408 return -EINVAL; in hpre_ecdh_set_secret()
1414 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_secret()
1417 return -EINVAL; in hpre_ecdh_set_secret()
1420 params.key_size = curve_sz - 1; in hpre_ecdh_set_secret()
1428 return -EINVAL; in hpre_ecdh_set_secret()
1439 sz = ctx->key_sz; in hpre_ecdh_set_secret()
1440 sz_shift = (sz << 1) + sz - params.key_size; in hpre_ecdh_set_secret()
1441 memcpy(ctx->ecdh.p + sz_shift, params.key, params.key_size); in hpre_ecdh_set_secret()
1451 struct device *dev = ctx->dev; in hpre_ecdh_hw_data_clr_all()
1452 struct hpre_sqe *sqe = &req->req; in hpre_ecdh_hw_data_clr_all()
1455 dma = le64_to_cpu(sqe->in); in hpre_ecdh_hw_data_clr_all()
1459 if (src && req->src) in hpre_ecdh_hw_data_clr_all()
1460 dma_free_coherent(dev, ctx->key_sz << 2, req->src, dma); in hpre_ecdh_hw_data_clr_all()
1462 dma = le64_to_cpu(sqe->out); in hpre_ecdh_hw_data_clr_all()
1466 if (req->dst) in hpre_ecdh_hw_data_clr_all()
1467 dma_free_coherent(dev, ctx->key_sz << 1, req->dst, dma); in hpre_ecdh_hw_data_clr_all()
1469 dma_unmap_single(dev, dma, ctx->key_sz << 1, DMA_FROM_DEVICE); in hpre_ecdh_hw_data_clr_all()
1474 unsigned int curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_cb()
1475 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_ecdh_cb()
1479 char *p; in hpre_ecdh_cb() local
1483 areq = req->areq.ecdh; in hpre_ecdh_cb()
1484 areq->dst_len = ctx->key_sz << 1; in hpre_ecdh_cb()
1490 p = sg_virt(areq->dst); in hpre_ecdh_cb()
1491 memmove(p, p + ctx->key_sz - curve_sz, curve_sz); in hpre_ecdh_cb()
1492 memmove(p + curve_sz, p + areq->dst_len - curve_sz, curve_sz); in hpre_ecdh_cb()
1494 hpre_ecdh_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_ecdh_cb()
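
For context on the two memmove() calls in hpre_ecdh_cb() above: the engine returns the result point with x and y each padded to key_sz bytes, and the callback packs them into curve_sz bytes each. A sketch with assumed NIST P-192 sizes (curve_sz = 24, key_sz = 32 after rounding, so dst_len = 64):

#include <string.h>

/* Hardware output layout (big-endian, assumed P-192 sizes):
 *   [ 0 ..  7] padding   [ 8 .. 31] x
 *   [32 .. 39] padding   [40 .. 63] y
 * packed down to [0 .. 23] x, [24 .. 47] y, as in the callback above.
 */
static void pack_point(unsigned char *p, unsigned int key_sz, unsigned int curve_sz)
{
	unsigned int dst_len = key_sz << 1;

	memmove(p, p + key_sz - curve_sz, curve_sz);
	memmove(p + curve_sz, p + dst_len - curve_sz, curve_sz);
}
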
1508 if (req->dst_len < ctx->key_sz << 1) { in hpre_ecdh_msg_request_set()
1509 req->dst_len = ctx->key_sz << 1; in hpre_ecdh_msg_request_set()
1510 return -EINVAL; in hpre_ecdh_msg_request_set()
1515 h_req->cb = hpre_ecdh_cb; in hpre_ecdh_msg_request_set()
1516 h_req->areq.ecdh = req; in hpre_ecdh_msg_request_set()
1517 msg = &h_req->req; in hpre_ecdh_msg_request_set()
1519 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1520 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1521 msg->key = cpu_to_le64(ctx->ecdh.dma_p); in hpre_ecdh_msg_request_set()
1523 msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT); in hpre_ecdh_msg_request_set()
1524 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_ecdh_msg_request_set()
1525 h_req->ctx = ctx; in hpre_ecdh_msg_request_set()
1529 return -EBUSY; in hpre_ecdh_msg_request_set()
1531 msg->tag = cpu_to_le16((u16)req_id); in hpre_ecdh_msg_request_set()
1538 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_src_data_init()
1539 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_src_data_init()
1540 struct device *dev = ctx->dev; in hpre_ecdh_src_data_init()
1547 shift = ctx->key_sz - (len >> 1); in hpre_ecdh_src_data_init()
1549 return -EINVAL; in hpre_ecdh_src_data_init()
1551 ptr = dma_alloc_coherent(dev, ctx->key_sz << 2, &dma, GFP_KERNEL); in hpre_ecdh_src_data_init()
1553 return -ENOMEM; in hpre_ecdh_src_data_init()
1555 tmpshift = ctx->key_sz << 1; in hpre_ecdh_src_data_init()
1558 memcpy(ptr + ctx->key_sz + shift, ptr + tmpshift + (len >> 1), len >> 1); in hpre_ecdh_src_data_init()
1560 hpre_req->src = ptr; in hpre_ecdh_src_data_init()
1561 msg->in = cpu_to_le64(dma); in hpre_ecdh_src_data_init()
1568 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_dst_data_init()
1569 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_dst_data_init()
1570 struct device *dev = ctx->dev; in hpre_ecdh_dst_data_init()
1573 if (unlikely(!data || !sg_is_last(data) || len != ctx->key_sz << 1)) { in hpre_ecdh_dst_data_init()
1575 return -EINVAL; in hpre_ecdh_dst_data_init()
1578 hpre_req->dst = NULL; in hpre_ecdh_dst_data_init()
1582 return -ENOMEM; in hpre_ecdh_dst_data_init()
1585 msg->out = cpu_to_le64(dma); in hpre_ecdh_dst_data_init()
1593 struct device *dev = ctx->dev; in hpre_ecdh_compute_value()
1596 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_compute_value()
1605 if (req->src) { in hpre_ecdh_compute_value()
1606 ret = hpre_ecdh_src_data_init(hpre_req, req->src, req->src_len); in hpre_ecdh_compute_value()
1612 msg->in = cpu_to_le64(ctx->ecdh.dma_g); in hpre_ecdh_compute_value()
1615 ret = hpre_ecdh_dst_data_init(hpre_req, req->dst, req->dst_len); in hpre_ecdh_compute_value()
1621 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_ECC_MUL); in hpre_ecdh_compute_value()
1624 return -EINPROGRESS; in hpre_ecdh_compute_value()
1628 hpre_ecdh_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_ecdh_compute_value()
1637 return ctx->key_sz << 1; in hpre_ecdh_max_size()
1644 ctx->curve_id = ECC_CURVE_NIST_P192; in hpre_ecdh_nist_p192_init_tfm()
1655 ctx->curve_id = ECC_CURVE_NIST_P256; in hpre_ecdh_nist_p256_init_tfm()
1666 ctx->curve_id = ECC_CURVE_NIST_P384; in hpre_ecdh_nist_p384_init_tfm()
1684 unsigned int sz = ctx->key_sz; in hpre_curve25519_fill_curve()
1687 void *p; in hpre_curve25519_fill_curve() local
1690 * The key from 'buf' is in little-endian, we should preprocess it as in hpre_curve25519_fill_curve()
1693 * the same as the software curve-25519 that exists in crypto. in hpre_curve25519_fill_curve()
1699 p = ctx->curve25519.p + sz - len; in hpre_curve25519_fill_curve()
1704 fill_curve_param(p, curve->p, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1705 fill_curve_param(p + sz, curve->a, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1706 memcpy(p + shift, secret, len); in hpre_curve25519_fill_curve()
1707 fill_curve_param(p + shift + sz, curve->g.x, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1714 struct device *dev = ctx->dev; in hpre_curve25519_set_param()
1715 unsigned int sz = ctx->key_sz; in hpre_curve25519_set_param()
1718 /* p->a->k->gx */ in hpre_curve25519_set_param()
1719 if (!ctx->curve25519.p) { in hpre_curve25519_set_param()
1720 ctx->curve25519.p = dma_alloc_coherent(dev, sz << 2, in hpre_curve25519_set_param()
1721 &ctx->curve25519.dma_p, in hpre_curve25519_set_param()
1723 if (!ctx->curve25519.p) in hpre_curve25519_set_param()
1724 return -ENOMEM; in hpre_curve25519_set_param()
1727 ctx->curve25519.g = ctx->curve25519.p + shift + sz; in hpre_curve25519_set_param()
1728 ctx->curve25519.dma_g = ctx->curve25519.dma_p + shift + sz; in hpre_curve25519_set_param()
1739 struct device *dev = ctx->dev; in hpre_curve25519_set_secret()
1740 int ret = -EINVAL; in hpre_curve25519_set_secret()
1751 ctx->key_sz = CURVE25519_KEY_SIZE; in hpre_curve25519_set_secret()
1767 struct device *dev = ctx->dev; in hpre_curve25519_hw_data_clr_all()
1768 struct hpre_sqe *sqe = &req->req; in hpre_curve25519_hw_data_clr_all()
1771 dma = le64_to_cpu(sqe->in); in hpre_curve25519_hw_data_clr_all()
1775 if (src && req->src) in hpre_curve25519_hw_data_clr_all()
1776 dma_free_coherent(dev, ctx->key_sz, req->src, dma); in hpre_curve25519_hw_data_clr_all()
1778 dma = le64_to_cpu(sqe->out); in hpre_curve25519_hw_data_clr_all()
1782 if (req->dst) in hpre_curve25519_hw_data_clr_all()
1783 dma_free_coherent(dev, ctx->key_sz, req->dst, dma); in hpre_curve25519_hw_data_clr_all()
1785 dma_unmap_single(dev, dma, ctx->key_sz, DMA_FROM_DEVICE); in hpre_curve25519_hw_data_clr_all()
1790 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_curve25519_cb()
1797 areq = req->areq.curve25519; in hpre_curve25519_cb()
1798 areq->dst_len = ctx->key_sz; in hpre_curve25519_cb()
1804 hpre_key_to_big_end(sg_virt(areq->dst), CURVE25519_KEY_SIZE); in hpre_curve25519_cb()
1806 hpre_curve25519_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_curve25519_cb()
1820 if (unlikely(req->dst_len < ctx->key_sz)) { in hpre_curve25519_msg_request_set()
1821 req->dst_len = ctx->key_sz; in hpre_curve25519_msg_request_set()
1822 return -EINVAL; in hpre_curve25519_msg_request_set()
1827 h_req->cb = hpre_curve25519_cb; in hpre_curve25519_msg_request_set()
1828 h_req->areq.curve25519 = req; in hpre_curve25519_msg_request_set()
1829 msg = &h_req->req; in hpre_curve25519_msg_request_set()
1831 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_curve25519_msg_request_set()
1832 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_curve25519_msg_request_set()
1833 msg->key = cpu_to_le64(ctx->curve25519.dma_p); in hpre_curve25519_msg_request_set()
1835 msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT); in hpre_curve25519_msg_request_set()
1836 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_curve25519_msg_request_set()
1837 h_req->ctx = ctx; in hpre_curve25519_msg_request_set()
1841 return -EBUSY; in hpre_curve25519_msg_request_set()
1843 msg->tag = cpu_to_le16((u16)req_id); in hpre_curve25519_msg_request_set()
1851 for (i = 0; i < CURVE25519_KEY_SIZE - 1; i++) in hpre_curve25519_src_modulo_p()
1854	/* The result mod p is ptr's last byte minus 0xed (the last byte of p) */ in hpre_curve25519_src_modulo_p()
1855 ptr[i] -= 0xed; in hpre_curve25519_src_modulo_p()
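
The single byte subtraction above works because, once the RFC 7748 high-bit mask has been applied, any value that still exceeds p = 2^255 - 19 must lie in [p + 1, p + 18]; subtracting p therefore zeroes everything except the last byte. A standalone toy check (the names and the p + 5 test value are made up):

#include <stdio.h>
#include <string.h>

#define KEY_SIZE 32

int main(void)
{
	unsigned char v[KEY_SIZE];

	/* v = p + 5 = 2^255 - 19 + 5, big-endian: 0x7f ff ... ff f2 */
	memset(v, 0xff, KEY_SIZE);
	v[0] = 0x7f;
	v[KEY_SIZE - 1] = 0xed + 5;

	memset(v, 0, KEY_SIZE - 1);	/* zero all but the last byte */
	v[KEY_SIZE - 1] -= 0xed;	/* 0xf2 - 0xed = 5 = v mod p */

	printf("v mod p = %u\n", v[KEY_SIZE - 1]);
	return 0;
}
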
1861 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_src_init()
1862 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_curve25519_src_init()
1863 struct device *dev = ctx->dev; in hpre_curve25519_src_init()
1864 u8 p[CURVE25519_KEY_SIZE] = { 0 }; in hpre_curve25519_src_init() local
1871 return -EINVAL; in hpre_curve25519_src_init()
1874 ptr = dma_alloc_coherent(dev, ctx->key_sz, &dma, GFP_KERNEL); in hpre_curve25519_src_init()
1876 return -ENOMEM; in hpre_curve25519_src_init()
1886 * Src_data(gx) is in little-endian order, MSB in the final byte should in hpre_curve25519_src_init()
1887 * be masked as described in RFC7748, then transform it to big-endian in hpre_curve25519_src_init()
1895 fill_curve_param(p, curve->p, CURVE25519_KEY_SIZE, curve->g.ndigits); in hpre_curve25519_src_init()
1898 * When src_data equals (2^255 - 19) ~ (2^255 - 1), it is out of p, in hpre_curve25519_src_init()
1899 * we get its modulus to p, and then use it. in hpre_curve25519_src_init()
1901 if (memcmp(ptr, p, ctx->key_sz) == 0) { in hpre_curve25519_src_init()
1902 dev_err(dev, "gx is p!\n"); in hpre_curve25519_src_init()
1904 } else if (memcmp(ptr, p, ctx->key_sz) > 0) { in hpre_curve25519_src_init()
1908 hpre_req->src = ptr; in hpre_curve25519_src_init()
1909 msg->in = cpu_to_le64(dma); in hpre_curve25519_src_init()
1913 dma_free_coherent(dev, ctx->key_sz, ptr, dma); in hpre_curve25519_src_init()
1914 return -EINVAL; in hpre_curve25519_src_init()
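
The masking referred to in the comments of hpre_curve25519_src_init() above is the RFC 7748 rule for X25519 u-coordinates: the most significant bit of the final little-endian byte must be cleared before the value is used. A minimal sketch, not the driver's exact code:

/* RFC 7748: implementations of X25519 MUST mask the most significant bit
 * in the final byte of the little-endian u-coordinate.
 */
static void x25519_mask_u(unsigned char u[32])
{
	u[31] &= 0x7f;	/* clear bit 255 before the big-endian conversion */
}
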
1920 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_dst_init()
1921 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_curve25519_dst_init()
1922 struct device *dev = ctx->dev; in hpre_curve25519_dst_init()
1925 if (!data || !sg_is_last(data) || len != ctx->key_sz) { in hpre_curve25519_dst_init()
1927 return -EINVAL; in hpre_curve25519_dst_init()
1930 hpre_req->dst = NULL; in hpre_curve25519_dst_init()
1934 return -ENOMEM; in hpre_curve25519_dst_init()
1937 msg->out = cpu_to_le64(dma); in hpre_curve25519_dst_init()
1945 struct device *dev = ctx->dev; in hpre_curve25519_compute_value()
1948 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_compute_value()
1957 if (req->src) { in hpre_curve25519_compute_value()
1958 ret = hpre_curve25519_src_init(hpre_req, req->src, req->src_len); in hpre_curve25519_compute_value()
1965 msg->in = cpu_to_le64(ctx->curve25519.dma_g); in hpre_curve25519_compute_value()
1968 ret = hpre_curve25519_dst_init(hpre_req, req->dst, req->dst_len); in hpre_curve25519_compute_value()
1974 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_CURVE25519_MUL); in hpre_curve25519_compute_value()
1977 return -EINPROGRESS; in hpre_curve25519_compute_value()
1981 hpre_curve25519_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_curve25519_compute_value()
1989 return ctx->key_sz; in hpre_curve25519_max_size()
2020 .cra_driver_name = "hpre-rsa",
2036 .cra_driver_name = "hpre-dh",
2052 .cra_name = "ecdh-nist-p192",
2053 .cra_driver_name = "hpre-ecdh-nist-p192",
2066 .cra_name = "ecdh-nist-p256",
2067 .cra_driver_name = "hpre-ecdh-nist-p256",
2080 .cra_name = "ecdh-nist-p384",
2081 .cra_driver_name = "hpre-ecdh-nist-p384",
2098 .cra_driver_name = "hpre-curve25519",
2113 dev_err(&qm->pdev->dev, "failed to register rsa (%d)!\n", ret); in hpre_register_rsa()
2135 dev_err(&qm->pdev->dev, "failed to register dh (%d)!\n", ret); in hpre_register_dh()
2158 dev_err(&qm->pdev->dev, "failed to register %s (%d)!\n", in hpre_register_ecdh()
2167 for (--i; i >= 0; --i) in hpre_register_ecdh()
2180 for (i = ARRAY_SIZE(ecdh_curves) - 1; i >= 0; --i) in hpre_unregister_ecdh()
2193 dev_err(&qm->pdev->dev, "failed to register x25519 (%d)!\n", ret); in hpre_register_x25519()
2251 if (--hpre_available_devs) in hpre_algs_unregister()