drivers/crypto/tegra/tegra-se-hash.c
// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"
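
/* Per-transform (tfm) context shared by the SHA, SHA-3 and HMAC algorithms */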
struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};
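
/*
 * Per-request context. The whole structure is also what
 * tegra_sha_export()/tegra_sha_import() copy in and out, so it carries the
 * intermediate hash result and the residue bookkeeping between calls.
 */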
struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};
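
/* Map an SE algorithm ID to the corresponding SE_SHA_CFG register value */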
static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}
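
/*
 * Software fallback helpers. When an HMAC key cannot be handled by the
 * hardware keyslots (see tegra_hmac_setkey()), the request is forwarded to
 * the fallback ahash transform allocated in tegra_sha_init_fallback().
 */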
static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}
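
/*
 * Build the host1x command stream for one hash operation: program the
 * message length/left counters, SE_SHA_CFG, the input and destination
 * buffer addresses, an optional key index, and finally the operation
 * trigger followed by a sync point increment.
 */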
static int tegra_sha_prep_cmd(struct tegra_se *se, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ > SHA_MSG_LEFT_[0-3] to the HASH engine,
	 * HW treats it as the last buffer and processes the data.
	 * Therefore, add an extra byte to msg_left if it is not the
	 * last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(6);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		cpuvaddr[i++] = 0;
	}

	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
			      SE_ADDR_HI_SZ(rctx->datbuf.size));
	cpuvaddr[i++] = rctx->digest.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
			      SE_ADDR_HI_SZ(rctx->digest.size));
	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL |
			SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu cfg %#x",
		msg_len, msg_left, rctx->config);

	return i;
}
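
/*
 * Save/restore the intermediate digest from the SE result registers.
 * Used to carry state across 'update' calls and for import/export.
 */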
static void tegra_sha_copy_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		rctx->result[i] = readl(se->base + se->hw->regs->result + (i * 4));
}

static void tegra_sha_paste_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		writel(rctx->result[i],
		       se->base + se->hw->regs->result + (i * 4));
}
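
/*
 * Process one 'update' request: gather the previous residue and the new
 * data into the DMA buffer, keep the trailing partial (or last full) block
 * back as residue, and submit the rest to the engine with the digest
 * written to the hash result registers.
 */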
static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	unsigned int nblks, nresidue, size, ret;
	u32 *cpuvaddr = ctx->se->cmdbuf->addr;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of block size and there is no residue,
	 * then reserve the last block as residue, to be processed in final().
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;
	rctx->total_len += rctx->datbuf.size;

	/*
	 * If nbytes is less than a block size, copy it to the residue and
	 * return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);

		rctx->residue.size += req->nbytes;
		return 0;
	}

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Update residue value with the residue after current block */
	rctx->residue.size = nresidue;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_HASH_REG;

	/*
	 * If this is not the first 'update' call, paste the previously copied
	 * intermediate results to the registers so that they get picked up.
	 * This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FIRST))
		tegra_sha_paste_hash_result(ctx->se, rctx);

	size = tegra_sha_prep_cmd(ctx->se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(ctx->se, size);

	/*
	 * If this is not the final update, copy the intermediate results
	 * from the registers so that they can be used in the next 'update'
	 * call. This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FINAL))
		tegra_sha_copy_hash_result(ctx->se, rctx);

	return ret;
}
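
/*
 * Process the 'final' step: hash the remaining residue with the digest
 * written to memory, copy it to the caller and release the DMA buffers
 * allocated in tegra_sha_init().
 */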
static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(se, size);
	if (ret)
		goto out;

	/* Copy result */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	dma_free_coherent(se->dev, SE_SHA_BUFLEN,
			  rctx->datbuf.buf, rctx->datbuf.addr);
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
	return ret;
}
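
/* crypto_engine callback: run the queued update and/or final steps */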
static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		rctx->task &= ~SHA_FINAL;
	}

	crypto_finalize_hash_request(se->engine, req, ret);

	return 0;
}
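
/*
 * Allocate the software fallback transform and grow the request/state
 * sizes so the fallback request fits behind struct tegra_sha_reqctx.
 */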
static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}
static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;
	ctx->key_id = 0;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}
static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}
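
/*
 * Start a new hash request: reset the request context and allocate the
 * coherent DMA buffers for the digest, the residue block and the data.
 */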
static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task = SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->datbuf.buf = dma_alloc_coherent(se->dev, SE_SHA_BUFLEN,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		goto datbuf_fail;

	return 0;

	/* Unwind in reverse order of allocation */
datbuf_fail:
	dma_free_coherent(se->dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}
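
/*
 * HMAC keys with a length the SE keyslots cannot take (anything that is
 * not an AES key size) are routed to the software fallback instead.
 */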
static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}

static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
}
static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	tegra_sha_init(req);
	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}
static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}
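
/* SHA, SHA-3 and HMAC-SHA algorithms registered with the crypto engine */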
static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};
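
/*
 * Build the key access control (KAC) manifest word for an HMAC key,
 * combining SE_KAC_USER_NS with the HMAC purpose and a size field
 * derived from keylen.
 */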
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}
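
/*
 * Register all hash algorithms with the crypto engine; on failure, roll
 * back the ones already registered.
 */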
int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto sha_err;
		}
	}

	return 0;

sha_err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}

void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}